From 567bdb4858dba48645528fe23c351926eb689a7a Mon Sep 17 00:00:00 2001 From: MSG <59928086+MSghais@users.noreply.github.com> Date: Wed, 7 Aug 2024 14:19:11 +0200 Subject: [PATCH] Clean/refacto (#26) * refacto home page form with hook and state * clean program + internal + fetch + rename * clean comment * cargo fmt * clean and refacto * cargo fmt --- .../src/app/components/InternalProgram.tsx | 256 ++----------- .../src/app/components/ProgramCard.tsx | 253 ++----------- .../src/app/components/description/index.tsx | 1 - .../src/app/config-marketplace/page.tsx | 5 - .../src/app/launch-program/page.tsx | 54 +-- askeladd-dvm-marketplace/src/app/page.tsx | 349 ++---------------- .../src/app/stwo-program/page.tsx | 143 +------ .../src/app/utils/generateAppHandler.ts | 9 +- .../src/constants/program.ts | 14 +- .../src/hooks/useDVMState.ts | 299 +++++++++++++++ .../src/hooks/useFetchEvents.ts | 70 +++- .../src/hooks/useSubmitJob.ts | 26 ++ askeladd-dvm-marketplace/src/types/index.ts | 115 +++--- crates/cli/src/dvm_customer.rs | 8 +- crates/core/src/db.rs | 2 - crates/core/src/dvm/customer.rs | 16 +- crates/core/src/dvm/mod.rs | 30 +- crates/core/src/dvm/service_provider.rs | 78 ++-- crates/core/src/prover_service.rs | 30 +- crates/core/src/utils.rs | 51 --- crates/core/src/verifier_service.rs | 10 +- .../src/{fibonnaci => fibonacci}/air.rs | 0 .../src/{fibonnaci => fibonacci}/component.rs | 0 .../src/{fibonnaci => fibonacci}/mod.rs | 16 - .../multi_fibonacci.rs | 97 +---- crates/stwo_wasm/src/lib.rs | 9 +- .../{wide_fibonnacci.rs => wide_fibonacci.rs} | 12 +- 27 files changed, 674 insertions(+), 1279 deletions(-) create mode 100644 askeladd-dvm-marketplace/src/hooks/useDVMState.ts create mode 100644 askeladd-dvm-marketplace/src/hooks/useSubmitJob.ts rename crates/stwo_wasm/src/{fibonnaci => fibonacci}/air.rs (100%) rename crates/stwo_wasm/src/{fibonnaci => fibonacci}/component.rs (100%) rename crates/stwo_wasm/src/{fibonnaci => fibonacci}/mod.rs (94%) rename crates/stwo_wasm/src/{fibonnaci => fibonacci}/multi_fibonacci.rs (55%) rename crates/stwo_wasm/src/{wide_fibonnacci.rs => wide_fibonacci.rs} (97%) diff --git a/askeladd-dvm-marketplace/src/app/components/InternalProgram.tsx b/askeladd-dvm-marketplace/src/app/components/InternalProgram.tsx index 6128958..0446c49 100644 --- a/askeladd-dvm-marketplace/src/app/components/InternalProgram.tsx +++ b/askeladd-dvm-marketplace/src/app/components/InternalProgram.tsx @@ -6,42 +6,27 @@ import { useFetchEvents } from '@/hooks/useFetchEvents'; import { ASKELADD_RELAY } from '@/constants/relay'; import init, { verify_stark_proof, verify_stark_proof_wide_fibo, prove_and_verify, stark_proof_wide_fibo, prove_stark_proof_poseidon, verify_stark_proof_poseidon, prove_and_verify_fib, verify_stark_proof_fib } from "../../pkg" import { useNostrContext } from '@/context/NostrContext'; +import { useDVMState } from '@/hooks/useDVMState'; // Define the props for the component interface TagsCardProps { event?: NDKEvent | NostrEvent; // Array of array of strings zkp_request?: IGenerateZKPRequestDVM } const InternalProgram: React.FC = ({ event, zkp_request }) => { - const { fetchEvents, fetchEventsTools, setupSubscriptionNostr } = useFetchEvents() const { ndk, pool } = useNostrContext() const [form, setForm] = useState({}) const program = zkp_request?.program; - const [isOpenForm, setIsOpenForm] = useState(false) const [logSize, setLogSize] = useState(5); const [claim, setClaim] = useState(443693538); const [publicKey, setPublicKey] = useState(); - const [jobId, setJobId] = 
useState(); const [error, setError] = useState() - const [starkProof, setStarkProof] = useState() const [jobEventResult, setJobEventResult] = useState() - const [seeTag, setSeeTag] = useState(false) - const [proof, setProof] = useState(null); const [isLoading, setIsLoading] = useState(false); const [isInitialized, setIsInitialized] = useState(false); const [isFetchJob, setIsFetchJob] = useState(false); - const [isLoadingJobResult, setIsLoadingJobResult] = useState(false); const [isWaitingJob, setIsWaitingJob] = useState(false); - const [timestampJob, setTimestampJob] = useState(); - const [proofStatus, setProofStatus] = useState< - "idle" | "pending" | "received" | "verified" - >("idle"); - const [selectedEvent, setSelectedEvent] = useState() - - let eventIdRequest = useMemo(() => { - return jobId - }, [jobId]) - + const {jobId, fetchJobRequest, fetchEventsProof, starkProof, submitJob: submitJobModular, proof, proofStatus, setProof, setProofStatus } = useDVMState() // Init wasm module to run_fibonacci_verify useEffect(() => { init() @@ -53,97 +38,13 @@ const InternalProgram: React.FC = ({ event, zkp_request }) => { }, []); useEffect(() => { - // const pool = new SimplePool(); - if (pool) { - runSubscriptionEvent(pool) - } + if (!jobId && !jobEventResult) { timeoutWaitingForJobResult() } }, [jobId, jobEventResult, pool]) - const runSubscriptionEvent = (pool: SimplePool, pubkey?: string) => { - - // WebSocket connection setup - // const ws = new WebSocket([ASKELADD_RELAY[0]]); // Replace with your Nostr relay URL - - // ws.onopen = () => { - // // Subscribe to specific events, adjust filters as needed - // ws.send(JSON.stringify({ - // "req": "EVENTS", - // // "filter": { - // // "#e": ["3a5f5b4..."] // Your event criteria here - // // } - // })); - // }; - - // ws.onmessage = (event) => { - // const data = JSON.parse(event.data); - // if (data) { - // if (!jobId) return; - // if (pubkey && data?.pubkey == pubkey) { - // setJobId(data?.id) - // } - // // setEvents(currentEvents => [...currentEvents, data]); - // } - // }; - - // ws.onerror = (error) => { - // console.error("WebSocket error:", error); - // }; - - let poolSubscription = pool.subscribeMany( - ASKELADD_RELAY, - [ - // { - // kinds: [KIND_JOB_REQUEST as NDKKind], - // // since:timestampJob - // // authors: pubkey ? [pubkey] : [] - // }, - { - kinds: [KIND_JOB_RESULT as NDKKind], - // since:timestampJob - }, - ], - { - onevent(event) { - // if (event?.kind == KIND_JOB_REQUEST) { - // if (!jobId) return; - // if (pubkey && event?.pubkey == pubkey) { - // setJobId(event?.id) - // } - // poolSubscription.close(); - // } - if (event?.kind == KIND_JOB_RESULT) { - if (!jobId) return; - let id = jobId ?? 
eventIdRequest; - if (id && !jobEventResult) { - console.log("Event job result received: ", event?.id); - console.log("event job content result include job: ", id); - let isIncludedJobId = event?.content?.includes(jobId) - let jobEventResultFind = event?.content?.includes(jobId) - console.log("isIncludedJobId", isIncludedJobId); - if (isIncludedJobId) { - console.log("Event JOB_RESULT find", jobEventResultFind); - getDataOfEvent(event); - setJobEventResult(event) - } - } - poolSubscription.close(); - } - }, - onclose: () => { - poolSubscription.close() - }, - oneose() { - poolSubscription.close() - } - } - ) - } - - const timeoutWaitingForJobResult = async () => { console.log("waiting timeout job result") setTimeout(() => { @@ -159,69 +60,6 @@ const InternalProgram: React.FC = ({ event, zkp_request }) => { setIsWaitingJob(false) } - const fetchEventsProof = async () => { - console.log("fetch events job result proof") - // if(jobEventResult && jobId)return; - setIsFetchJob(false); - setIsLoadingJobResult(true); - const { events } = await fetchEventsTools({ - kind: KIND_JOB_RESULT, - // since: timestampJob, - // search: jobId - // search: `#${jobId}`, - }) - console.log("events job result", events); - if (!events) return; - let lastEvent = events[events?.length - 1] - if (!lastEvent) return; - let id = jobId ?? eventIdRequest; - if (jobEventResult && jobEventResult?.id == id && proof && proofStatus != "pending") return; - if (id && !jobEventResult) { - let jobEventResultFind = events?.find((e) => e?.content?.includes(id)) - console.log("jobEventResultFind", jobEventResultFind); - let filterJob = events?.filter((e) => e?.id?.includes(id)) - // console.log("filterJob", filterJob); - if (jobEventResultFind?.id) { - console.log("Event JOB_RESULT find", jobEventResultFind); - getDataOfEvent(jobEventResultFind); - setJobEventResult(jobEventResultFind) - } - } - } - - const getDataOfEvent = (lastEvent?: NDKEvent | NostrEvent) => { - if (!lastEvent || !lastEvent?.content) return; - setSelectedEvent(lastEvent); - setProof(lastEvent?.content?.toString()) - const jobProofSerialize: any = JSON.parse(lastEvent?.content) - console.log('jobProofSerialize serialize', jobProofSerialize); - const proofSerialize = jobProofSerialize?.response?.proof; - console.log('proof serialize', proofSerialize); - setStarkProof(proofSerialize); - setProofStatus("received"); - return proofSerialize - } - - const fetchJobRequest = async (pubkey?: string) => { - const { events } = await fetchEventsTools({ - kind: KIND_JOB_REQUEST, - since: timestampJob, - // authors: pubkey ? 
[pubkey] : [] - }); - console.log("events job request", events); - if (!events) return; - const lastEvent = events[0] - if (!lastEvent?.id) return; - const lastEventId = lastEvent?.id; - if (pubkey && pubkey == lastEvent?.pubkey) { - console.log("lastEventId", lastEventId) - setJobId(lastEventId); - eventIdRequest = lastEventId; - setIsWaitingJob(true) - } - } - - /** Submit job with JOB_REQUEST 5600 * - Use extension NIP-7 * - Default public key demo @@ -229,20 +67,16 @@ const InternalProgram: React.FC = ({ event, zkp_request }) => { */ const submitJob = async () => { try { - /** Todo better check */ if (!isLoading && !isOpenForm && Object.entries(form).length == 0) return; setIsLoading(true); setIsFetchJob(false); - setJobId(undefined) setProofStatus("pending"); setProof(null); setJobEventResult(undefined); setError(undefined); + let tags: string[][] = [ - // ['param', 'log_size', logSize.toString()], - // ['param', 'claim', claim.toString()], - // ['output', 'text/json'] ]; const inputs: Map = new Map(); @@ -257,61 +91,34 @@ const InternalProgram: React.FC = ({ event, zkp_request }) => { tags.push(["param", key, value]) } console.log("inputs", Object.fromEntries(inputs)) - const content = JSON.stringify({ - request: form, + console.log("inputs", inputs) + let job_request: IGenerateZKPRequestDVM = { + // request: form, + request: Object.fromEntries(inputs), program: { contract_name: zkp_request?.program?.contract_name, internal_contract_name: zkp_request?.program?.internal_contract_name, contract_reached: zkp_request?.program?.contract_reached, - inputs: Object.fromEntries(inputs), - inputs_types: undefined, - inputs_encrypted: undefined + inputs: inputs, + // inputs_types: undefined, + // inputs_encrypted: undefined } - }) - // Define the timestamp before which you want to fetch events - setTimestampJob(new Date().getTime()) - console.log("inputs", inputs) - console.log("content", content) - /** Use Nostr extension to send event */ - const pool = new SimplePool(); - if (typeof window !== "undefined" && window.nostr) { - const pubkey = await window.nostr.getPublicKey(); - let created_at = new Date().getTime(); - setPublicKey(pubkey) - const event = await window.nostr.signEvent({ - pubkey: pubkey, - created_at: created_at, - kind: 5600, - tags: tags, - content: content - }) // takes an event object, adds `id`, `pubkey` and `sig` and returns it - // Setup job request to fetch job id + } - /** @TODO why the event id is not return? 
- * - get the last event and fetch job_id event - * - check if events is sent with subscription - * - */ - // let eventID = await relay.publish(event as EventNostr); - const eventID = await Promise.any(pool.publish(ASKELADD_RELAY, event as NostrEvent)); - console.log("eventID", eventID[0]) - await fetchJobRequest(pubkey) - setIsWaitingJob(true); - await timeoutWaitingForJobResult() + const content = JSON.stringify(job_request) + console.log("content", content) - } else { + let res = await submitJobModular(5600, + Object.fromEntries(inputs), + job_request, + tags - /** @TODO flow is user doesn't have NIP-07 extension */ - // let { result, event } = await sendNote({ content, tags, kind: 5600 }) - // console.log("event", event) - // if (event?.sig) { - // setJobId(event?.sig); - // } - // setIsWaitingJob(true) - /** NDK event - * Generate or import private key after - */ + ) + if(res && res?.success) { + fetchJobRequest() + fetchEventsProof() } + return res; } catch (e) { } finally { setIsLoading(false); @@ -321,7 +128,7 @@ const InternalProgram: React.FC = ({ event, zkp_request }) => { const verifyProofHandler = async () => { try { - if (proof) { + if (proof && starkProof) { setIsLoading(true); const inputs: Map = new Map(); { @@ -330,14 +137,15 @@ const InternalProgram: React.FC = ({ event, zkp_request }) => { } ) } + console.log("wide fibo prove_result", starkProof); - if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName.WideFibonnaciProvingRequest) { + if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName.WideFibonacciProvingRequest) { let log_n_instances = inputs.get("log_n_instances"); let log_fibonacci_size = inputs.get("log_fibonacci_size"); - if (!log_n_instances && !log_fibonacci_size) return; + const serialised_proof_from_nostr_event = JSON.stringify(starkProof); + if (!log_n_instances && !log_fibonacci_size && !serialised_proof_from_nostr_event) return; const prove_result = stark_proof_wide_fibo(Number(log_fibonacci_size), Number(log_n_instances)); console.log("wide fibo prove_result", prove_result); - const serialised_proof_from_nostr_event = JSON.stringify(starkProof); console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); const verify_result = verify_stark_proof_wide_fibo(Number(log_fibonacci_size), Number(log_n_instances), serialised_proof_from_nostr_event); console.log("verify result", verify_result); @@ -369,15 +177,12 @@ const InternalProgram: React.FC = ({ event, zkp_request }) => { setError(verify_result?.message) } } - else if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName.FibonnacciProvingRequest) { + else if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName.FibonacciProvingRequest) { const prove_result = prove_and_verify_fib(logSize, claim); console.log("prove_result", prove_result); const serialised_proof_from_nostr_event = JSON.stringify(starkProof); console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); const verify_result = verify_stark_proof_fib(logSize, claim, serialised_proof_from_nostr_event); - console.log("verify result", verify_result); - console.log("verify message", verify_result.message); - console.log("verify success", verify_result.success); if (verify_result?.success) { console.log("is success verify result") setProofStatus("verified"); @@ -398,10 +203,7 @@ const InternalProgram: React.FC = ({ event, zkp_request }) => { } }; - const date: string | undefined = event?.created_at ? 
new Date(event?.created_at).toDateString() : undefined - const params = Object.fromEntries(zkp_request?.program?.inputs?.entries() ?? []) - // Handle changes in form inputs const handleChange = (e: React.ChangeEvent) => { const { name, value } = e.target; diff --git a/askeladd-dvm-marketplace/src/app/components/ProgramCard.tsx b/askeladd-dvm-marketplace/src/app/components/ProgramCard.tsx index 7150512..45978fd 100644 --- a/askeladd-dvm-marketplace/src/app/components/ProgramCard.tsx +++ b/askeladd-dvm-marketplace/src/app/components/ProgramCard.tsx @@ -6,49 +6,33 @@ import { useFetchEvents } from '@/hooks/useFetchEvents'; import { ASKELADD_RELAY } from '@/constants/relay'; import init, { verify_stark_proof, verify_stark_proof_wide_fibo, prove_and_verify, stark_proof_wide_fibo, prove_stark_proof_poseidon, verify_stark_proof_poseidon, prove_and_verify_fib, verify_stark_proof_fib } from "../../pkg" import { useNostrContext } from '@/context/NostrContext'; +import { useDVMState } from '@/hooks/useDVMState'; // Define the props for the component interface TagsCardProps { event?: NDKEvent | NostrEvent; // Array of array of strings zkp_request?: IGenerateZKPRequestDVM } const ProgramCard: React.FC = ({ event, zkp_request }) => { - // console.log("zkp_request config", zkp_request) - const { fetchEvents, fetchEventsTools, setupSubscriptionNostr } = useFetchEvents() const { ndk, pool } = useNostrContext() const inputs = zkp_request?.program?.inputs const [form, setForm] = useState(zkp_request?.request ? zkp_request?.request : inputs ? inputs : {}) - // const [form, setForm] = useState(zkp_request?.request ? zkp_request?.request : inputs ? Object.fromEntries(inputs) : {}) const [requestTemplate, setRequestTemplate] = useState(zkp_request?.request ? zkp_request?.request : inputs ? Object.fromEntries(inputs) : {}) - // const [requestValue, setRequetValue] = useState(inputs ? inputs Object.fromEntries(inputs) : {}) const [requestValue, setRequetValue] = useState(inputs ? 
inputs : {}) const [isOpenForm, setIsOpenForm] = useState(false) const [logSize, setLogSize] = useState(5); const [claim, setClaim] = useState(443693538); - const [publicKey, setPublicKey] = useState(); const [jobId, setJobId] = useState(); const [error, setError] = useState() - const [starkProof, setStarkProof] = useState() const [jobEventResult, setJobEventResult] = useState() - const [seeTag, setSeeTag] = useState(false) - const [proof, setProof] = useState(null); const [isLoading, setIsLoading] = useState(false); const [isInitialized, setIsInitialized] = useState(false); const [isFetchJob, setIsFetchJob] = useState(false); - const [isLoadingJobResult, setIsLoadingJobResult] = useState(false); const [isWaitingJob, setIsWaitingJob] = useState(false); - const [timestampJob, setTimestampJob] = useState(); - const [proofStatus, setProofStatus] = useState< - "idle" | "pending" | "received" | "verified" - >("idle"); - const [selectedEvent, setSelectedEvent] = useState() + const { fetchEventsProof, fetchJobRequest, starkProof, submitJob: submitJobModular, proof, proofStatus, setProof, setProofStatus } = useDVMState() const program = zkp_request?.program; const contract_reached = zkp_request?.program?.contract_reached; - let eventIdRequest = useMemo(() => { - return jobId - }, [jobId]) - // Init wasm module to run_fibonacci_verify useEffect(() => { init() @@ -70,87 +54,6 @@ const ProgramCard: React.FC = ({ event, zkp_request }) => { }, [jobId, jobEventResult, pool]) - // const runSubscriptionEvent = (pool: SimplePool, pubkey?: string) => { - - // // WebSocket connection setup - // // const ws = new WebSocket([ASKELADD_RELAY[0]]); // Replace with your Nostr relay URL - - // // ws.onopen = () => { - // // // Subscribe to specific events, adjust filters as needed - // // ws.send(JSON.stringify({ - // // "req": "EVENTS", - // // // "filter": { - // // // "#e": ["3a5f5b4..."] // Your event criteria here - // // // } - // // })); - // // }; - - // // ws.onmessage = (event) => { - // // const data = JSON.parse(event.data); - // // if (data) { - // // if (!jobId) return; - // // if (pubkey && data?.pubkey == pubkey) { - // // setJobId(data?.id) - // // } - // // // setEvents(currentEvents => [...currentEvents, data]); - // // } - // // }; - - // // ws.onerror = (error) => { - // // console.error("WebSocket error:", error); - // // }; - - // let poolSubscription = pool.subscribeMany( - // ASKELADD_RELAY, - // [ - // // { - // // kinds: [KIND_JOB_REQUEST as NDKKind], - // // // since:timestampJob - // // // authors: pubkey ? [pubkey] : [] - // // }, - // { - // kinds: [KIND_JOB_RESULT as NDKKind], - // // since:timestampJob - // }, - // ], - // { - // onevent(event) { - // // if (event?.kind == KIND_JOB_REQUEST) { - // // if (!jobId) return; - // // if (pubkey && event?.pubkey == pubkey) { - // // setJobId(event?.id) - // // } - // // poolSubscription.close(); - // // } - // if (event?.kind == KIND_JOB_RESULT) { - // if (!jobId) return; - // let id = jobId ?? 
eventIdRequest; - // if (id && !jobEventResult) { - // console.log("Event job result received: ", event?.id); - // console.log("event job content result include job: ", id); - // let isIncludedJobId = event?.content?.includes(jobId) - // let jobEventResultFind = event?.content?.includes(jobId) - // console.log("isIncludedJobId", isIncludedJobId); - // if (isIncludedJobId) { - // console.log("Event JOB_RESULT find", jobEventResultFind); - // getDataOfEvent(event); - // setJobEventResult(event) - // } - // } - // poolSubscription.close(); - // } - // }, - // onclose: () => { - // poolSubscription.close() - // }, - // oneose() { - // poolSubscription.close() - // } - // } - // ) - // } - - const timeoutWaitingForJobResult = async () => { console.log("waiting timeout job result") setTimeout(() => { @@ -166,69 +69,6 @@ const ProgramCard: React.FC = ({ event, zkp_request }) => { setIsWaitingJob(false) } - const fetchEventsProof = async () => { - console.log("fetch events job result proof") - // if(jobEventResult && jobId)return; - setIsFetchJob(false); - setIsLoadingJobResult(true); - const { events } = await fetchEventsTools({ - kind: KIND_JOB_RESULT, - // since: timestampJob, - // search: jobId - // search: `#${jobId}`, - }) - // console.log("events job result", events); - if (!events) return; - let lastEvent = events[events?.length - 1] - if (!lastEvent) return; - let id = jobId ?? eventIdRequest; - if (jobEventResult && jobEventResult?.id == id && proof && proofStatus != "pending") return; - if (id && !jobEventResult) { - let jobEventResultFind = events?.find((e) => e?.content?.includes(id)) - console.log("jobEventResultFind", jobEventResultFind); - let filterJob = events?.filter((e) => e?.id?.includes(id)) - // console.log("filterJob", filterJob); - if (jobEventResultFind?.id) { - console.log("Event JOB_RESULT find", jobEventResultFind); - getDataOfEvent(jobEventResultFind); - setJobEventResult(jobEventResultFind) - } - } - } - - const getDataOfEvent = (lastEvent?: NDKEvent | NostrEvent) => { - if (!lastEvent || !lastEvent?.content) return; - setSelectedEvent(lastEvent); - setProof(lastEvent?.content?.toString()) - const jobProofSerialize: any = JSON.parse(lastEvent?.content) - console.log('jobProofSerialize serialize', jobProofSerialize); - const proofSerialize = jobProofSerialize?.response?.proof; - console.log('proof serialize', proofSerialize); - setStarkProof(proofSerialize); - setProofStatus("received"); - return proofSerialize - } - - const fetchJobRequest = async (pubkey?: string) => { - const { events } = await fetchEventsTools({ - kind: KIND_JOB_REQUEST, - since: timestampJob, - // authors: pubkey ? 
[pubkey] : [] - }); - console.log("events job request", events); - if (!events) return; - const lastEvent = events[0] - if (!lastEvent?.id) return; - const lastEventId = lastEvent?.id; - if (pubkey && pubkey == lastEvent?.pubkey) { - console.log("lastEventId", lastEventId) - setJobId(lastEventId); - eventIdRequest = lastEventId; - setIsWaitingJob(true) - } - } - - /** Submit job with JOB_REQUEST 5600 * - Use extension NIP-7 * - Default public key demo @@ -242,8 +82,8 @@ const ProgramCard: React.FC = ({ event, zkp_request }) => { setIsLoading(true); setIsFetchJob(false); setJobId(undefined) - setProofStatus("pending"); - setProof(null); + // setProofStatus("pending"); + // setProof(null); setJobEventResult(undefined); setError(undefined); let tags: string[][] = [ @@ -268,62 +108,29 @@ const ProgramCard: React.FC = ({ event, zkp_request }) => { tags.push(["param", key, value]) } console.log("inputs", Object.fromEntries(inputs)) - const content = JSON.stringify({ + + let job_request: IGenerateZKPRequestDVM = { // request: form, request: Object.fromEntries(inputs), program: { contract_name: zkp_request?.program?.contract_name, internal_contract_name: zkp_request?.program?.internal_contract_name, contract_reached: zkp_request?.program?.contract_reached, - inputs: Object.fromEntries(inputs), - inputs_types: undefined, - inputs_encrypted: undefined + // inputs: Object.fromEntries(inputs), + inputs: inputs, + // inputs_types: undefined, + // inputs_encrypted: undefined } - }) - console.log("content", content) - // Define the timestamp before which you want to fetch events - setTimestampJob(new Date().getTime()) - console.log("inputs", inputs) - /** Use Nostr extension to send event */ - const pool = new SimplePool(); - if (typeof window !== "undefined" && window.nostr) { - const pubkey = await window.nostr.getPublicKey(); - let created_at = new Date().getTime(); - setPublicKey(pubkey) - const event = await window.nostr.signEvent({ - pubkey: pubkey, - created_at: created_at, - kind: 5600, - tags: tags, - content: content - }) // takes an event object, adds `id`, `pubkey` and `sig` and returns it - // Setup job request to fetch job id - - /** @TODO why the event id is not return? 
- * - get the last event and fetch job_id event - * - check if events is sent with subscription - * - */ - // let eventID = await relay.publish(event as EventNostr); - const eventID = await Promise.any(pool.publish(ASKELADD_RELAY, event as NostrEvent)); - console.log("eventID", eventID[0]) - // await fetchJobRequest(pubkey) - // setIsWaitingJob(true); - // await timeoutWaitingForJobResult() - - } else { - - /** @TODO flow is user doesn't have NIP-07 extension */ - // let { result, event } = await sendNote({ content, tags, kind: 5600 }) - // console.log("event", event) - // if (event?.sig) { - // setJobId(event?.sig); - // } - // setIsWaitingJob(true) - /** NDK event - * Generate or import private key after - */ } + + const content = JSON.stringify(job_request) + let res = await submitJobModular(5600, + Object.fromEntries(inputs), + job_request, + tags + + ) + return res; } catch (e) { } finally { setIsLoading(false); @@ -335,16 +142,30 @@ const ProgramCard: React.FC = ({ event, zkp_request }) => { try { if (proof) { setIsLoading(true); + // const inputs: Map = new Map(); + // { + // Object.entries(form).map(([key, value]) => { + // inputs.set(key, value as string) + // } + // ) + // } + const inputs: Map = new Map(); { Object.entries(form).map(([key, value]) => { - inputs.set(key, value as string) + + if (!requestValue[key]) { + inputs.set(key, value as string) + } + } ) } - - if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName.WideFibonnaciProvingRequest) { + console.log("inputs") + console.log("zkp_request",zkp_request) + + if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName.WideFibonacciProvingRequest) { let log_n_instances = inputs.get("log_n_instances"); let log_fibonacci_size = inputs.get("log_fibonacci_size"); if (!log_n_instances && !log_fibonacci_size) return; @@ -382,7 +203,7 @@ const ProgramCard: React.FC = ({ event, zkp_request }) => { setError(verify_result?.message) } } - else if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName.FibonnacciProvingRequest) { + else if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName.FibonacciProvingRequest) { const prove_result = prove_and_verify_fib(logSize, claim); console.log("prove_result", prove_result); const serialised_proof_from_nostr_event = JSON.stringify(starkProof); diff --git a/askeladd-dvm-marketplace/src/app/components/description/index.tsx b/askeladd-dvm-marketplace/src/app/components/description/index.tsx index 18d0838..4a48029 100644 --- a/askeladd-dvm-marketplace/src/app/components/description/index.tsx +++ b/askeladd-dvm-marketplace/src/app/components/description/index.tsx @@ -4,7 +4,6 @@ export const HowItWork = () => { const [openHowItWork, setOpenHowItWork] = useState(false); - return(
setOpenHowItWork(!openHowItWork)} className="max-w-sm cursor-pointer my-5 p-1 m-1 whitespace-pre-line break-words" diff --git a/askeladd-dvm-marketplace/src/app/config-marketplace/page.tsx b/askeladd-dvm-marketplace/src/app/config-marketplace/page.tsx index 0d7a51e..e583879 100644 --- a/askeladd-dvm-marketplace/src/app/config-marketplace/page.tsx +++ b/askeladd-dvm-marketplace/src/app/config-marketplace/page.tsx @@ -10,7 +10,6 @@ import { ASKELADD_KINDS, ConfigHandle } from "@/types"; import EventCard from "../components/EventCard"; import { generateContentAndTags } from "../utils/generateAppHandler"; import { HowItWork } from "../components/description"; - export default function Home() { const [publicKey, setPublicKey] = useState(); const [appKind, setAppKind] = useState(ASKELADD_KINDS.KIND_JOB_REQUEST) @@ -28,7 +27,6 @@ export default function Home() { const [isAdmin, setIsAdmin] = useState(false); const [timestampJob, setTimestampJob] = useState(); const { fetchEvents, fetchEventsTools, setupSubscriptionNostr } = useFetchEvents() - const { sendNote, publishNote } = useSendNote() useEffect(() => { if (isNeedLoadEvents || !isInitialized) { @@ -60,7 +58,6 @@ export default function Home() { setIsNeedLoadEvents(false) } - /** Connect you */ const connectExtension = async () => { try { @@ -109,7 +106,6 @@ export default function Home() { setError(undefined); setTimestampJob(new Date().getTime()) - /** Use Nostr extension to send event */ const pool = new SimplePool(); let pubkey; if (typeof window !== "undefined" && window.nostr) { @@ -130,7 +126,6 @@ export default function Home() { content: content }) // takes an event object, adds `id`, `pubkey` and `sig` and returns it // Setup job request to fetch job id - // let eventID = await relay.publish(event as EventNostr); const eventID = await Promise.any(pool.publish(ASKELADD_RELAY, event as EventNostr)); console.log("eventID", eventID[0]) diff --git a/askeladd-dvm-marketplace/src/app/launch-program/page.tsx b/askeladd-dvm-marketplace/src/app/launch-program/page.tsx index 3077a99..2534d9d 100644 --- a/askeladd-dvm-marketplace/src/app/launch-program/page.tsx +++ b/askeladd-dvm-marketplace/src/app/launch-program/page.tsx @@ -4,14 +4,10 @@ import { useState, useEffect, useMemo } from "react"; import { NDKEvent, NDKKind } from '@nostr-dev-kit/ndk'; import { useSendNote } from "@/hooks/useSendNote"; import { useFetchEvents } from "@/hooks/useFetchEvents"; -import { APPLICATION_PUBKEY_DVM, ASKELADD_RELAY } from "@/constants/relay"; +import { ASKELADD_RELAY } from "@/constants/relay"; import { Event as EventNostr, SimplePool } from "nostr-tools"; import { ASKELADD_KINDS, ConfigHandle, ContractUploadType, IGenerateZKPRequestDVM, IProgramParams, KIND_JOB_ADD_PROGRAM } from "@/types"; -import EventCard from "../components/EventCard"; -import { generateContentAndTags } from "../utils/generateAppHandler"; import { HowItWork } from "../components/description"; -import { PROGRAM_INTERAL_REQUEST } from "@/constants/program"; - export default function LaunchProgram() { const [publicKey, setPublicKey] = useState(); const [appKind, setAppKind] = useState(ASKELADD_KINDS.KIND_JOB_REQUEST) @@ -30,7 +26,6 @@ export default function LaunchProgram() { const [timestampJob, setTimestampJob] = useState(); const { fetchEvents, fetchEventsTools, setupSubscriptionNostr } = useFetchEvents() const { sendNote, publishNote } = useSendNote() - const [logSize, setLogSize] = useState(5); const [claim, setClaim] = useState(443693538); const [inputIndex, setInputsIndex] = 
useState(0) const [isOpenForm, setIsOpenForm] = useState(false) @@ -178,20 +173,6 @@ export default function LaunchProgram() { }; - const mockProgram = async () => { - /** Todo better check */ - if (!isLoading && !isOpenForm && Object.entries(form).length == 0) return; - setIsLoading(true); - setJobId(undefined) - setProofStatus("pending"); - setError(undefined); - const tags = [ - ['param', 'log_size', logSize.toString()], - ['param', 'claim', claim.toString()], - ['output', 'text/json'] - ]; - - } const submitProgram = async () => { try { setIsLoading(true); @@ -199,7 +180,6 @@ export default function LaunchProgram() { setLastConfig(undefined); setError(undefined); console.log("formEncrypted", formEncrypted) - let tags: string[][] = [] const inputs: Map = new Map(); { @@ -236,20 +216,13 @@ export default function LaunchProgram() { } const content = JSON.stringify({ - // request: form as any, - // request: form, request: Object.fromEntries(inputs), program: { contract_name: programParam?.contract_name ?? "test", - // internal_contract_name: programParam?.internal_contract_name ?? "test", contract_reached: programParam?.contract_reached ?? ContractUploadType.Ipfs, inputs: Object.fromEntries(inputs), - // inputs_types: Object.fromEntries(inputs), - // inputs_encrypted: Object.fromEntries(inputs_encrypted), - // tags: tags } }) - console.log("tags", tags) console.log("content", content) setTimestampJob(new Date().getTime()) @@ -272,7 +245,6 @@ export default function LaunchProgram() { content: content }) // takes an event object, adds `id`, `pubkey` and `sig` and returns it // // Setup job request to fetch job id - // // let eventID = await relay.publish(event as EventNostr); const eventID = await Promise.any(pool.publish(ASKELADD_RELAY, event as EventNostr)); console.log("eventID", eventID[0]) @@ -307,13 +279,13 @@ export default function LaunchProgram() { } - const handleAllInputsEncrypted = () => { - Object.entries(form).map(([key, value]) => { - setFormEncrypted({ ...formEncrypted, [value as string]: true }) - } - ) + // const handleAllInputsEncrypted = () => { + // Object.entries(form).map(([key, value]) => { + // setFormEncrypted({ ...formEncrypted, [value as string]: true }) + // } + // ) - } + // } return (
@@ -327,8 +299,6 @@ export default function LaunchProgram() { > {isLoading ? "PROCESSING..." : "CONNECT"} - -

Askeladd DVM

Launch program

@@ -372,7 +342,6 @@ export default function LaunchProgram() { }}>X
-
) })} @@ -389,7 +358,6 @@ export default function LaunchProgram() {

Inputs encrypted

- {formEncrypted && Object.entries(formEncrypted).map(([key, value], i) => { return ( @@ -449,13 +417,7 @@ export default function LaunchProgram() {
{isLoading &&
} - {/* */} - {/* */} - {/* */} - +
diff --git a/askeladd-dvm-marketplace/src/app/page.tsx b/askeladd-dvm-marketplace/src/app/page.tsx index f4d740e..f927f3b 100644 --- a/askeladd-dvm-marketplace/src/app/page.tsx +++ b/askeladd-dvm-marketplace/src/app/page.tsx @@ -1,49 +1,26 @@ "use client"; import { useState, useEffect, useMemo } from "react"; -import { NDKEvent, NDKKind } from '@nostr-dev-kit/ndk'; -import { useNostrContext } from "@/context/NostrContext"; -import { useSendNote } from "@/hooks/useSendNote"; -import { JobResultProver, KIND_JOB_REQUEST, KIND_JOB_RESULT, ProgramInternalContractName } from "@/types"; -import init, { verify_stark_proof, prove_and_verify, prove_and_verify_fib, verify_stark_proof_fib, stark_proof_wide_fibo, verify_stark_proof_wide_fibo } from "../pkg/stwo_wasm"; -import { useFetchEvents } from "@/hooks/useFetchEvents"; -import { ASKELADD_RELAY } from "@/constants/relay"; -import { Relay } from 'nostr-tools/relay'; -import { Event as EventNostr, SimplePool } from "nostr-tools"; +import { NDKEvent } from '@nostr-dev-kit/ndk'; +import { ContractUploadType, ProgramInternalContractName } from "@/types"; +import init, { stark_proof_wide_fibo, verify_stark_proof_wide_fibo } from "../pkg/stwo_wasm"; +import { Event as EventNostr } from "nostr-tools"; +import { useDVMState } from "@/hooks/useDVMState"; export default function Home() { const [log_n_instances, setLogNInstances] = useState(0); - const [log_fibonnacci_size, setLogFibonnacciSize] = useState(5); - const [logSize, setLogSize] = useState(5); - const [claim, setClaim] = useState(443693538); - const [publicKey, setPublicKey] = useState(); - const [jobId, setJobId] = useState(); - // const [jobId, setJobId] = useState("78e3026c35d08ab8345b4efa49e0fe27c74f3849589720e01286cda69c36cc39"); - // Event ID test : "f708c6ba3c078a364ef7d5222310c14288841a63956b10186959b48e3284c4bb" - // 191ade3aa99bdbb7d6781e1149cf0ec4205db1ac097df9f83a6d7a10d88712c0 + const [log_fibonacci_size, setLogFibonacciSize] = useState(5); const [error, setError] = useState() - const [starkProof, setStarkProof] = useState() const [jobEventResult, setJobEventResult] = useState() - // const [starkProof, setStarkProof] = useState() - const [events, setEvents] = useState([]) - const [selectedEvent, setSelectedEvent] = useState() const [proofStatus, setProofStatus] = useState< "idle" | "pending" | "received" | "verified" >("idle"); - const [proof, setProof] = useState(null); const [isLoading, setIsLoading] = useState(false); const [isInitialized, setIsInitialized] = useState(false); const [isFetchJob, setIsFetchJob] = useState(false); - const [isLoadingJobResult, setIsLoadingJobResult] = useState(false); - const [isWaitingJob, setIsWaitingJob] = useState(false); - const [timestampJob, setTimestampJob] = useState(); - - let eventIdRequest = useMemo(() => { - return jobId - }, [jobId]) - const { ndk, pool } = useNostrContext() - const { fetchEvents, fetchEventsTools, setupSubscriptionNostr } = useFetchEvents() - const { sendNote, publishNote } = useSendNote() - + const { eventIdRequest, jobId, setJobId, setIsWaitingJob, fetchJobRequest, proof, fetchEventsProof, + starkProof, + submitJob: submitJobModular, + publicKey } = useDVMState() // Init wasm module to run_fibonacci_verify useEffect(() => { init() @@ -75,49 +52,6 @@ export default function Home() { } }, [jobId, isFetchJob, jobEventResult]) - const runSubscriptionEvent = (pool: SimplePool, pubkey?: string) => { - let poolRequest = pool.subscribeMany( - ASKELADD_RELAY, - [ - { - kinds: [KIND_JOB_REQUEST as NDKKind], - // 
since:timestampJob - // authors: pubkey ? [pubkey] : [] - }, - { - kinds: [KIND_JOB_RESULT as NDKKind], - // since:timestampJob - }, - ], - { - onevent(event) { - if (event?.kind == KIND_JOB_REQUEST) { - console.log("Event job request received: ", event?.id); - if (!jobId) return; - if (pubkey && event?.pubkey == pubkey) { - setJobId(event?.id) - } - poolRequest.close(); - - } - if (event?.kind == KIND_JOB_RESULT) { - console.log("Event job request received: ", event?.id); - if (!jobId) return; - if (pubkey && event?.pubkey == pubkey) { - setJobId(event?.id) - } - poolRequest.close(); - } - }, - onclose: () => { - poolRequest.close() - }, - oneose() { - poolRequest.close() - } - } - ) - } /** Submit job with JOB_REQUEST 5600 * - Use extension NIP-7 @@ -130,26 +64,17 @@ export default function Home() { setIsFetchJob(false); setJobId(undefined) setProofStatus("pending"); - setProof(null); setJobEventResult(undefined); setError(undefined); const tags = [ ['param', 'log_n_instances', log_n_instances.toString()], - ['param', 'log_fibonnacci_size', log_fibonnacci_size.toString()], + ['param', 'log_fibonacci_size', log_fibonacci_size.toString()], ['output', 'text/json'] ]; - // const tags = [ - // ['param', 'log_size', logSize.toString()], - // ['param', 'claim', claim.toString()], - // ['output', 'text/json'] - // ]; const tags_values = [ ['param', 'log_n_instances', log_n_instances.toString()], - ['param', 'log_fibonnacci_size', log_fibonnacci_size.toString()], - // ['param', 'claim', claim.toString()], - // ['param', 'log_size', logSize.toString()], - // ['param', 'claim', claim.toString()], + ['param', 'log_fibonacci_size', log_fibonacci_size.toString()], ]; @@ -158,80 +83,33 @@ export default function Home() { for (let tag of tags_values) { inputs.set(tag[1], tag[2]) } - console.log("inputs", Object.fromEntries(inputs)) + console.log("parent inputs", Object.fromEntries(inputs)) - const content = JSON.stringify({ + const zkp_request = { request: { - // log_size: logSize.toString(), log_n_instances: log_n_instances.toString(), - log_fibonnacci_size: log_fibonnacci_size.toString(), - // claim: claim.toString() + log_fibonacci_size: log_fibonacci_size.toString(), }, program: { - // contract_name: "PoseidonProvingRequest", - // internal_contract_name: "PoseidonProvingRequest", - contract_name: ProgramInternalContractName.WideFibonnaciProvingRequest.toString(), - internal_contract_name: ProgramInternalContractName.WideFibonnaciProvingRequest.toString(), - // internal_contract_name: "PoseidonProvingRequest", - - // contract_name:"FibonnacciProvingRequest", - // internal_contract_name:"FibonnacciProvingRequest", - contract_reached: "InternalAskeladd", - // inputs:JSON.stringify(Object.fromEntries(inputs)), - inputs: Object.fromEntries(inputs), - // inputs:tags + contract_name: ProgramInternalContractName.WideFibonacciProvingRequest.toString(), + internal_contract_name: ProgramInternalContractName.WideFibonacciProvingRequest, + contract_reached: ContractUploadType.InternalAskeladd, + inputs: inputs, } - }) - // Define the timestamp before which you want to fetch events - // setTimestampJob(new Date().getTime() / 1000) - setTimestampJob(new Date().getTime()) - console.log("inputs", inputs) - console.log("content", content) - // return ; - /** Use Nostr extension to send event */ - const pool = new SimplePool(); - const poolJob = new SimplePool(); - const relay = await Relay.connect(ASKELADD_RELAY[0]) - if (typeof window !== "undefined" && window.nostr) { - - const pubkey = await 
window.nostr.getPublicKey(); - console.log("pubkey",pubkey) - let created_at = new Date().getTime(); - setPublicKey(pubkey) - const event = await window.nostr.signEvent({ - pubkey: pubkey, - created_at: created_at, - kind: 5600, - tags: tags, - content: content - }) // takes an event object, adds `id`, `pubkey` and `sig` and returns it - // Setup job request to fetch job id + } - /** @TODO why the event id is not return? - * - get the last event and fetch job_id event - * - check if events is sent with subscription - * - */ - // let eventID = await relay.publish(event as EventNostr); - const eventID = await Promise.any(pool.publish(ASKELADD_RELAY, event as EventNostr)); - console.log("eventID", eventID[0]) - await fetchJobRequest(pubkey) - setIsWaitingJob(true); - await timeoutWaitingForJobResult() + let res = await submitJobModular(5600, { + log_n_instances, + log_fibonacci_size + }, + zkp_request, + tags - } else { + ) + fetchJobRequest(undefined, publicKey) + waitingForJobResult() + timeoutWaitingForJobResult() - /** @TODO flow is user doesn't have NIP-07 extension */ - // let { result, event } = await sendNote({ content, tags, kind: 5600 }) - // console.log("event", event) - // if (event?.sig) { - // setJobId(event?.sig); - // } - // setIsWaitingJob(true) - /** NDK event - * Generate or import private key after - */ - } } catch (e) { } finally { setIsLoading(false); @@ -239,113 +117,17 @@ export default function Home() { }; - /** TODO fetch subscribed event - * fix search jobId => check if relayer support NIP-50 - * Fetch Job result from the Prover - * - Tags: By reply of the event_id of the job request? - * - By author - * - Timestamp since/until (doesn't work as expected for me) -*/ - const fetchJobRequest = async (pubkey?: string) => { - - const { events } = await fetchEventsTools({ - kind: KIND_JOB_REQUEST, - since: timestampJob, - // authors: pubkey ? [pubkey] : [] - }); - console.log("events job request", events); - if (!events) return; - // const lastEvent = events[events?.length - 1] - const lastEvent = events[0] - if (!lastEvent?.id) return; - const lastEventId = lastEvent?.id; - if (pubkey && pubkey == lastEvent?.pubkey) { - console.log("lastEventId", lastEventId) - setJobId(lastEventId); - eventIdRequest = lastEventId; - setIsWaitingJob(true) - } - - } - - - /** TODO fetch subscribed event - * fix search jobId => check if relayer support NIP-50 - * Fetch Job result from the Prover - * - Tags: By reply of the event_id of the job request? - * - By author - * - Timestamp since/until (doesn't work as expected for me) - */ - const fetchEventsProof = async () => { - console.log("fetch events job result proof") - // if(jobEventResult && jobId)return; - setIsFetchJob(false); - setIsLoadingJobResult(true); - const { events } = await fetchEventsTools({ - kind: KIND_JOB_RESULT, - // since: timestampJob, - // search: jobId - // search: `#${jobId}`, - }) - console.log("events job result", events); - if (!events) return; - let lastEvent = events[events?.length - 1] - if (!lastEvent) return; - let id = jobId ?? 
eventIdRequest; - if (jobEventResult && jobEventResult?.id == id && proof && proofStatus != "pending") return; - if (id && !jobEventResult) { - let jobEventResultFind = events?.find((e) => e?.content?.includes(id)) - console.log("jobEventResultFind", jobEventResultFind); - let filterJob = events?.filter((e) => e?.id?.includes(id)) - console.log("filterJob", filterJob); - if (jobEventResultFind?.id) { - console.log("Event JOB_RESULT find", jobEventResultFind); - getDataOfEvent(jobEventResultFind); - setJobEventResult(jobEventResultFind) - } - } - } - - const getDataOfEvent = (lastEvent?: NDKEvent | EventNostr) => { - if (!lastEvent || !lastEvent?.content) return; - setSelectedEvent(lastEvent); - setProof(lastEvent?.content?.toString()) - const jobProofSerialize: any = JSON.parse(lastEvent?.content) - console.log('jobProofSerialize serialize', jobProofSerialize); - const proofSerialize = jobProofSerialize?.response?.proof; - console.log('proof serialize', proofSerialize); - setStarkProof(proofSerialize); - setProofStatus("received"); - return proofSerialize - } - const verifyProofHandler = async () => { try { if (proof) { setIsLoading(true); - - /** Change Poseidon to default */ - // const prove_result = prove_and_verify(log_n_instances); - // console.log("prove_result", prove_result); - // const serialised_proof_from_nostr_event = JSON.stringify(starkProof); - // console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); - // const verify_result = verify_stark_proof(logSize, serialised_proof_from_nostr_event); - // console.log("verify result", verify_result); - // console.log("verify message", verify_result.message); - // console.log("verify success", verify_result.success); - // if (verify_result?.success) { - // console.log("is success verify result") - // setProofStatus("verified"); - // } else { - // setError(verify_result?.message) - // } - - if (!log_n_instances && !log_fibonnacci_size) return; - const prove_result = stark_proof_wide_fibo(Number(log_fibonnacci_size), Number(log_n_instances)); - console.log("wide fibo prove_result", prove_result); + /** Change Wide fibo to default */ const serialised_proof_from_nostr_event = JSON.stringify(starkProof); + if (!log_n_instances && !log_fibonacci_size && !serialised_proof_from_nostr_event) return; + const prove_result = stark_proof_wide_fibo(Number(log_fibonacci_size), Number(log_n_instances)); + console.log("wide fibo prove_result", prove_result); console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); - const verify_result = verify_stark_proof_wide_fibo(Number(log_fibonnacci_size), Number(log_n_instances), serialised_proof_from_nostr_event); + const verify_result = verify_stark_proof_wide_fibo(Number(log_fibonacci_size), Number(log_n_instances), serialised_proof_from_nostr_event); console.log("verify result", verify_result); console.log("verify message", verify_result.message); console.log("verify success", verify_result.success); @@ -355,23 +137,6 @@ export default function Home() { } else { setError(verify_result?.message) } - - /** FIB default */ - // const prove_result = prove_and_verify_fib(logSize, claim); - // console.log("prove_result", prove_result); - // const serialised_proof_from_nostr_event = JSON.stringify(starkProof); - // console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); - // const verify_result = verify_stark_proof_fib(logSize, claim, serialised_proof_from_nostr_event); - // console.log("verify result", verify_result); - // 
console.log("verify message", verify_result.message); - // console.log("verify success", verify_result.success); - // if (verify_result?.success) { - // console.log("is success verify result") - // setProofStatus("verified"); - // } else { - // setError(verify_result?.message) - // } - setIsLoading(false); setIsFetchJob(true) } @@ -395,20 +160,17 @@ export default function Home() {

Censorship-resistant global proving network

Verifiable computation for DVMs

- {/*

Prove poseidon

*/} -

Wide Fibonnacci

+

Wide Fibonacci

- + setLogFibonnacciSize(Number(e.target.value))} + value={log_fibonacci_size} + onChange={(e) => setLogFibonacciSize(Number(e.target.value))} className="w-full bg-black text-neon-green px-3 py-2 rounded border-neon-green border-2" />
- -
- - {/*
- - setClaim(Number(e.target.value))} - className="w-full bg-black text-neon-green px-3 py-2 rounded border-neon-green border-2" - /> -
*/} - - - {/*
- - setLogSize(Number(e.target.value))} - className="w-full bg-black text-neon-green px-3 py-2 rounded border-neon-green border-2" - /> -
- -
- - setClaim(Number(e.target.value))} - className="w-full bg-black text-neon-green px-3 py-2 rounded border-neon-green border-2" - /> -
*/} -
{isLoading &&
} - {jobId && (

Job ID: {jobId}

diff --git a/askeladd-dvm-marketplace/src/app/stwo-program/page.tsx b/askeladd-dvm-marketplace/src/app/stwo-program/page.tsx index ee40a71..1a484a6 100644 --- a/askeladd-dvm-marketplace/src/app/stwo-program/page.tsx +++ b/askeladd-dvm-marketplace/src/app/stwo-program/page.tsx @@ -1,71 +1,22 @@ "use client"; -import { useState, useEffect, useMemo } from "react"; -import { NDKEvent, NDKKind } from '@nostr-dev-kit/ndk'; -import { useNostrContext } from "@/context/NostrContext"; -import { useSendNote } from "@/hooks/useSendNote"; -import { ContractUploadType, IGenerateZKPRequestDVM, JobResultProver, KIND_JOB_ADD_PROGRAM, KIND_JOB_REQUEST, KIND_JOB_RESULT, ProgramInternalContractName } from "@/types"; -import init, { verify_stark_proof, prove_and_verify } from "../../pkg/stwo_wasm"; -import { useFetchEvents } from "@/hooks/useFetchEvents"; -import { ASKELADD_RELAY } from "@/constants/relay"; -import { Relay } from 'nostr-tools/relay'; -import { Event as EventNostr, SimplePool } from "nostr-tools"; +import { useState, useEffect } from "react"; +import { IGenerateZKPRequestDVM } from "@/types"; import { PROGRAM_INTERAL_REQUEST } from "@/constants/program"; import ProgramCard from "../components/ProgramCard"; import InternalProgram from "../components/InternalProgram"; +import { useDVMState } from "@/hooks/useDVMState"; export default function StwoProgramMarketplace() { - const [logSize, setLogSize] = useState(5); - const [claim, setClaim] = useState(443693538); - const [publicKey, setPublicKey] = useState(); - const [jobId, setJobId] = useState(); - const [error, setError] = useState() - const [starkProof, setStarkProof] = useState() - const [jobEventResult, setJobEventResult] = useState() - const [events, setEvents] = useState([]) - const [selectedEvent, setSelectedEvent] = useState() - const [proofStatus, setProofStatus] = useState< - "idle" | "pending" | "received" | "verified" - >("idle"); - const [proof, setProof] = useState(null); const [isLoading, setIsLoading] = useState(false); - const [isInitialized, setIsInitialized] = useState(false); const [isFetchJob, setIsFetchJob] = useState(false); - const [isLoadingJobResult, setIsLoadingJobResult] = useState(false); - const [isWaitingJob, setIsWaitingJob] = useState(false); - const [timestampJob, setTimestampJob] = useState(); - - const [internalProgram, setInternalProgram] = useState(PROGRAM_INTERAL_REQUEST) - - let eventIdRequest = useMemo(() => { - return jobId - }, [jobId]) - const { ndk, pool } = useNostrContext() - const { fetchEvents, fetchEventsTools, setupSubscriptionNostr } = useFetchEvents() - const { sendNote, publishNote } = useSendNote() - - // Init wasm module to run_fibonacci_verify - useEffect(() => { - init() - .then(() => setIsInitialized(true)) - .catch((error) => { - console.error("Failed to initialize WASM module:", error); - - }); - }, []); - + const [internalsPrograms, setInternalPrograms] = useState(PROGRAM_INTERAL_REQUEST) + const {fetchEventsProof, jobId, jobEventResult, fetchPrograms, events, eventsPrograms} = useDVMState() /** Effect to fetch the job result when a job request is sent */ const waitingForJobResult = async () => { if (jobEventResult && jobId) return; if(!jobId) return; fetchEventsProof() setIsLoading(false); - setIsWaitingJob(false) - } - const timeoutWaitingForJobResult = async () => { - console.log("waiting timeout job result") - setTimeout(() => { - waitingForJobResult() - }, 5000); } useEffect(() => { @@ -75,107 +26,27 @@ export default function StwoProgramMarketplace() { } }, [jobId, isFetchJob, 
jobEventResult]) - - - const fetchPrograms = async () => { - console.log("fetch events program") - // if(jobEventResult && jobId)return; - setIsFetchJob(false); - setIsLoadingJobResult(true); - const { events } = await fetchEvents({ - kind: KIND_JOB_ADD_PROGRAM, - // kinds:[KIND_JOB_ADD_PROGRAM as NDKKind] - // since: timestampJob, - // search: jobId - // search: `#${jobId}`, - }) - console.log("events job program", events); - setEvents(events) - if (!events) return; - let lastEvent = events[events?.length - 1] - if (!lastEvent) return; - let id = jobId ?? eventIdRequest; - - } - - /** TODO fetch subscribed event - * fix search jobId => check if relayer support NIP-50 - * Fetch Job result from the Prover - * - Tags: By reply of the event_id of the job request? - * - By author - * - Timestamp since/until (doesn't work as expected for me) - */ - const fetchEventsProof = async () => { - console.log("fetch events job result proof") - // if(jobEventResult && jobId)return; - setIsFetchJob(false); - setIsLoadingJobResult(true); - const { events } = await fetchEventsTools({ - kind: KIND_JOB_RESULT, - // kinds:[KIND_JOB_RESULT as NDKKind] - // since: timestampJob, - // search: jobId - // search: `#${jobId}`, - }) - console.log("events job result", events); - if (!events) return; - let lastEvent = events[events?.length - 1] - if (!lastEvent) return; - let id = jobId ?? eventIdRequest; - if (jobEventResult && jobEventResult?.id == id && proof && proofStatus != "pending") return; - if (id && !jobEventResult) { - let jobEventResultFind = events?.find((e) => e?.content?.includes(id)) - console.log("jobEventResultFind", jobEventResultFind); - if (jobEventResultFind?.id) { - console.log("Event JOB_RESULT find", jobEventResultFind); - getDataOfEvent(jobEventResultFind); - setJobEventResult(jobEventResultFind) - } - } - } - - const getDataOfEvent = (lastEvent?: NDKEvent | EventNostr) => { - if (!lastEvent || !lastEvent?.content) return; - setSelectedEvent(lastEvent); - setProof(lastEvent?.content?.toString()) - const jobProofSerialize: any = JSON.parse(lastEvent?.content) - console.log('jobProofSerialize serialize', jobProofSerialize); - const proofSerialize = jobProofSerialize?.response?.proof; - console.log('proof serialize', proofSerialize); - setStarkProof(proofSerialize); - setProofStatus("received"); - return proofSerialize - } - return (
-

Askeladd DVM

STWO ZK Program Marketplace

Check out the STWO Provers ready to use!

- - -
{internalProgram?.map((p, i) => { +
{internalsPrograms?.map((p, i) => { return ( ) })} -
-
{events?.map((e, i) => { - console.log("e program", e) - +
{eventsPrograms?.map((e, i) => { const p: IGenerateZKPRequestDVM = JSON.parse(e.content) - console.log("p", p) - return ( ) diff --git a/askeladd-dvm-marketplace/src/app/utils/generateAppHandler.ts b/askeladd-dvm-marketplace/src/app/utils/generateAppHandler.ts index 449b092..255ca15 100644 --- a/askeladd-dvm-marketplace/src/app/utils/generateAppHandler.ts +++ b/askeladd-dvm-marketplace/src/app/utils/generateAppHandler.ts @@ -4,9 +4,6 @@ import { ASKELADD_KINDS, ConfigHandle, KIND_JOB_REQUEST, KIND_JOB_RESULT } from const PARAMS_JOB_REQUEST_ZK = { request: { - }, - params: { - }, program: { @@ -24,15 +21,13 @@ const PARAMS_JOB_RESULT_ZK = { } } - const PARAMS_ALL_KIND = { [KIND_JOB_REQUEST]:PARAMS_JOB_REQUEST_ZK, [KIND_JOB_RESULT]:PARAMS_JOB_RESULT_ZK, } +/** TODO correct bech32 for nprofile, nevent of the application handler */ export const generateTagsByAppKind = (tags: string[][], appKind: ASKELADD_KINDS, config:ConfigHandle) => { - const randomId = Math.random().toString(36).substring(7); - if(config == ConfigHandle.SPECIFIC_KIND) { tags = [ ["d", randomId], @@ -59,6 +54,8 @@ export const generateTagsByAppKind = (tags: string[][], appKind: ASKELADD_KINDS, return tags } + +/** TODO correct bech32 for nprofile, nevent of the application handler */ export const generateContentAndTags = (configKind: ConfigHandle, appKind?: ASKELADD_KINDS, pubkey?:string): { tags?: string[][], content?: string } => { let tags: string[][] = [] let content = ""; diff --git a/askeladd-dvm-marketplace/src/constants/program.ts b/askeladd-dvm-marketplace/src/constants/program.ts index 73c5262..d4e7d45 100644 --- a/askeladd-dvm-marketplace/src/constants/program.ts +++ b/askeladd-dvm-marketplace/src/constants/program.ts @@ -29,14 +29,14 @@ export const PROGRAM_INTERAL_REQUEST:IGenerateZKPRequestDVM[] = [ { - // Wide Fibonnaci + // Wide Fibonacci request: { log_fibonacci_size:0, log_n_instances:0 }, program: { - contract_name:ProgramInternalContractName.WideFibonnaciProvingRequest.toString(), - internal_contract_name:ProgramInternalContractName.WideFibonnaciProvingRequest, + contract_name:ProgramInternalContractName.WideFibonacciProvingRequest.toString(), + internal_contract_name:ProgramInternalContractName.WideFibonacciProvingRequest, contract_reached:ContractUploadType.InternalAskeladd, inputs:program_map_wide_fibo } @@ -65,8 +65,8 @@ export const PROGRAM_INTERAL_REQUEST:IGenerateZKPRequestDVM[] = [ // claim:0 // }, // program: { - // contract_name:ProgramInternalContractName.FibonnacciProvingRequest.toString(), - // internal_contract_name:ProgramInternalContractName.FibonnacciProvingRequest, + // contract_name:ProgramInternalContractName.FibonacciProvingRequest.toString(), + // internal_contract_name:ProgramInternalContractName.FibonacciProvingRequest, // contract_reached:ContractUploadType.InternalAskeladd, // inputs:program_map_fibo // } @@ -81,8 +81,8 @@ export const PROGRAM_INTERAL_REQUEST:IGenerateZKPRequestDVM[] = [ // claims:0 // }, // program: { - // contract_name:ProgramInternalContractName.MultiFibonnacciProvingRequest.toString(), - // internal_contract_name:ProgramInternalContractName.MultiFibonnacciProvingRequest, + // contract_name:ProgramInternalContractName.MultiFibonacciProvingRequest.toString(), + // internal_contract_name:ProgramInternalContractName.MultiFibonacciProvingRequest, // contract_reached:ContractUploadType.InternalAskeladd, // inputs:program_map_multi_fibo // } diff --git a/askeladd-dvm-marketplace/src/hooks/useDVMState.ts b/askeladd-dvm-marketplace/src/hooks/useDVMState.ts new file 
mode 100644 index 0000000..92f5575 --- /dev/null +++ b/askeladd-dvm-marketplace/src/hooks/useDVMState.ts @@ -0,0 +1,299 @@ +import { ASKELADD_RELAY } from '@/constants/relay'; +import { IGenerateZKPRequestDVM, KIND_JOB_ADD_PROGRAM, KIND_JOB_REQUEST, KIND_JOB_RESULT } from '@/types'; +import { NDKEvent, NDKKind } from '@nostr-dev-kit/ndk'; +import { SimplePool, NostrEvent, Relay } from 'nostr-tools'; +import { useMemo, useState } from 'react'; +import { useFetchEvents } from './useFetchEvents'; + +export const useDVMState = () => { + const [proofStatus, setProofStatus] = useState< + "idle" | "pending" | "received" | "verified" + >("idle"); + const [publicKey, setPublicKey] = useState(); + const [pool, setPool] = useState(new SimplePool()) + const [jobId, setJobId] = useState(); + const [isWaitingJob, setIsWaitingJob] = useState(false); + const [jobEventResult, setJobEventResult] = useState() + const [starkProof, setStarkProof] = useState() + const [isFetchJob, setIsFetchJob] = useState(false); + const [isLoadingJobResult, setIsLoadingJobResult] = useState(false); + const [selectedEvent, setSelectedEvent] = useState() + const [events, setEvents] = useState([]) + const [eventsPrograms, setEventsPrograms] = useState([]) + let eventIdRequest = useMemo(() => { + return jobId + }, [jobId]) + const { fetchEvents, fetchEventsTools, setupSubscriptionNostr } = useFetchEvents() + + const [proof, setProof] = useState(null); + + /** TODO fetch subscribed event +* fix search by jobId => check if the relay supports NIP-50 +* Fetch Job result from the Prover +* - Tags: By reply of the event_id of the job request? +* - By author +* - Timestamp since/until (doesn't work as expected for me) +*/ + const fetchJobRequest = async (timestampJob?: number, pubkey?: string) => { + + const { events } = await fetchEventsTools({ + kind: KIND_JOB_REQUEST, + since: timestampJob, + }); + console.log("events job request", events); + if (!events) return; + const lastEvent = events[0] + if (!lastEvent?.id) return; + const lastEventId = lastEvent?.id; + if (pubkey && pubkey == lastEvent?.pubkey) { + console.log("lastEventId", lastEventId) + setJobId(lastEventId); + eventIdRequest = lastEventId; + setIsWaitingJob(true) + } + } + + + const runSubscriptionEvent = (pool: SimplePool, pubkey?: string) => { + let poolRequest = pool.subscribeMany( + ASKELADD_RELAY, + [ + { + kinds: [KIND_JOB_REQUEST as NDKKind], + // since:timestampJob + // authors: pubkey ? [pubkey] : [] + }, + { + kinds: [KIND_JOB_RESULT as NDKKind], + // since:timestampJob + }, + ], + { + onevent(event) { + if (event?.kind == KIND_JOB_REQUEST) { + console.log("Event job request received: ", event?.id); + if (!jobId) return; + if (pubkey && event?.pubkey == pubkey) { + setJobId(event?.id) + } + poolRequest.close(); + } + if (event?.kind == KIND_JOB_RESULT) { + console.log("Event job result received: ", event?.id); + if (!jobId) return; + if (pubkey && event?.pubkey == pubkey) { + setJobId(event?.id) + } + poolRequest.close(); + } + }, + onclose: () => { + poolRequest.close() + }, + oneose() { + poolRequest.close() + } + } + ) + } + + /** TODO fetch subscribed event + * fix search by jobId => check if the relay supports NIP-50 + * Fetch Job result from the Prover + * - Tags: By reply of the event_id of the job request? 
+ * - By author + * - Timestamp since/until (doesn't work as expected for me) + */ + const fetchEventsProof = async () => { + console.log("fetch events job result proof") + console.log("last job request id", jobId) + // if(jobEventResult && jobId)return; + setIsFetchJob(false); + setIsLoadingJobResult(true); + const { events } = await fetchEventsTools({ + kind: KIND_JOB_RESULT, + // since: timestampJob, + // search: jobId + // search: `#${jobId}`, + }) + console.log("events job result", events); + if (!events) return; + let lastEvent = events[events?.length - 1] + if (!lastEvent) return; + let id = jobId ?? eventIdRequest; + if (jobEventResult && jobEventResult?.id == id && proof && proofStatus != "pending") return; + if (id && !jobEventResult) { + let jobEventResultFind = events?.find((e) => e?.content?.includes(id)) + console.log("jobEventResultFind", jobEventResultFind); + let filterJob = events?.filter((e) => e?.id?.includes(id)) + console.log("filterJob", filterJob); + if (jobEventResultFind?.id) { + console.log("Event JOB_RESULT find", jobEventResultFind); + getDataOfEvent(jobEventResultFind); + setJobEventResult(jobEventResultFind) + } + } + } + + const getDataOfEvent = (lastEvent?: NDKEvent | NostrEvent) => { + if (!lastEvent || !lastEvent?.content) return; + setSelectedEvent(lastEvent); + setProof(lastEvent?.content?.toString()) + const jobProofSerialize: any = JSON.parse(lastEvent?.content) + console.log('jobProofSerialize serialize', jobProofSerialize); + const proofSerialize = jobProofSerialize?.response?.proof; + console.log('proof serialize', proofSerialize); + setStarkProof(proofSerialize); + setProofStatus("received"); + return proofSerialize + } + + const submitJob = async (kind: number, form: any, zkp_request?: IGenerateZKPRequestDVM, tags_parents?: string[][], request?: any) => { + try { + // setIsLoading(true); + // setIsFetchJob(false); + // setJobId(undefined) + // setProofStatus("pending"); + // setProof(null); + // setJobEventResult(undefined); + // setError(undefined); + let tags: string[][] = tags_parents ?? [] + console.log("tags parents", tags) + console.log("zkp_request parent", zkp_request) + + const inputs: Map = zkp_request?.program?.inputs ?? new Map(); + if (zkp_request?.program?.inputs) { + Object.entries(zkp_request.program.inputs).map(([key, value]) => { + inputs.set(key, value as string) + } + ) + for (let [key, value] of inputs) { + tags.push(["param", key, value]) + } + } else if (form) { + Object.entries(form).map(([key, value]) => { + inputs.set(key, value as string) + } + ) + for (let [key, value] of inputs) { + tags.push(["param", key, value]) + } + } + tags.push(['output', 'text/json']) + console.log("tags", tags) + + console.log("inputs", Object.fromEntries(inputs)) + const content = JSON.stringify({ + // request: form, + request: form ?? 
inputs, + // request: Object.fromEntries(inputs), + program: { + contract_name: zkp_request?.program?.contract_name, + internal_contract_name: zkp_request?.program?.internal_contract_name, + contract_reached: zkp_request?.program?.contract_reached, + inputs: Object.fromEntries(inputs), + // inputs:inputs, + inputs_types: undefined, + inputs_encrypted: undefined + } + }) + + console.log("inputs", inputs) + console.log("content", content) + // return ; + const timestamp = new Date().getTime() + /** Use Nostr extension to send event */ + const pool = new SimplePool(); + const poolJob = new SimplePool(); + const relay = await Relay.connect(ASKELADD_RELAY[0]) + if (typeof window !== "undefined" && window.nostr) { + + const pubkey = await window.nostr.getPublicKey(); + console.log("pubkey", pubkey) + setPublicKey(pubkey) + + let created_at = new Date().getTime(); + // setPublicKey(pubkey) + const event = await window.nostr.signEvent({ + pubkey: pubkey, + created_at: created_at, + kind: kind, + tags: tags, + content: content + }) // takes an event object, adds `id`, `pubkey` and `sig` and returns it + // Setup job request to fetch job id + + /** @TODO why is the event id not returned? + * - get the last event and fetch the job_id event + * - check if the event is sent via the subscription + * + */ + // let eventID = await relay.publish(event as EventNostr); + const eventID = await Promise.any(pool.publish(ASKELADD_RELAY, event as NostrEvent)); + console.log("eventID", eventID[0]) + await fetchJobRequest(timestamp, pubkey) + setIsWaitingJob(true); + + return { + success: true, + }; + // await timeoutWaitingForJobResult() + + } else { + + /** @TODO flow if the user doesn't have a NIP-07 extension */ + // let { result, event } = await sendNote({ content, tags, kind: 5600 }) + // console.log("event", event) + // if (event?.sig) { + // setJobId(event?.sig); + // } + // setIsWaitingJob(true) + /** NDK event + * Generate or import private key after + */ + return { + success: false, + }; + } + } catch (e) { + } finally { + // setIsLoading(false); + } + + }; + + const fetchPrograms = async () => { + console.log("fetch events program") + const { events } = await fetchEvents({ + kind: KIND_JOB_ADD_PROGRAM, + // kinds:[KIND_JOB_ADD_PROGRAM as NDKKind] + // since: timestampJob, + // search: jobId + // search: `#${jobId}`, + }) + console.log("events job program", events); + setEventsPrograms(events) + if (!events) return; + let lastEvent = events[events?.length - 1] + if (!lastEvent) return; + + } + + + return { + starkProof, proof, proofStatus, setProof, setProofStatus, + runSubscriptionEvent, + fetchJobRequest, + submitJob, + fetchEventsProof, + setJobId, + jobId, eventIdRequest, + isWaitingJob, setIsWaitingJob, + publicKey, setPublicKey, + setIsLoadingJobResult, + jobEventResult, + fetchPrograms, + eventsPrograms, + events + } +}; diff --git a/askeladd-dvm-marketplace/src/hooks/useFetchEvents.ts b/askeladd-dvm-marketplace/src/hooks/useFetchEvents.ts index bff1f4b..7d2da02 100644 --- a/askeladd-dvm-marketplace/src/hooks/useFetchEvents.ts +++ b/askeladd-dvm-marketplace/src/hooks/useFetchEvents.ts @@ -3,7 +3,8 @@ import { useNostrContext } from '@/context/NostrContext'; import { KIND_JOB_REQUEST, KIND_JOB_RESULT } from '@/types'; import { NDKKind } from '@nostr-dev-kit/ndk'; import { SimplePool, NostrEvent } from 'nostr-tools'; -import { useState } from 'react'; +import { useMemo, useState } from 'react'; +import { useDVMState } from './useDVMState'; interface IEventFilter { kind?: NDKKind | number, limit?: number, since?: 
number, until?: number, kinds?: NDKKind[], search?: string, ids?: string[], authors?: string[] @@ -19,6 +20,21 @@ export const useFetchEvents = () => { const { ndk } = useNostrContext(); // const pool = new SimplePool() const [pool, setPool] = useState(new SimplePool()) + const [isWaitingJob, setIsWaitingJob] = useState(false); + +// let eventIdRequest = useMemo(() => { +// return jobId +// }, [jobId]) + + /** TODO fetch subscribed event +* fix search jobId => check if relayer support NIP-50 +* Fetch Job result from the Prover +* - Tags: By reply of the event_id of the job request? +* - By author +* - Timestamp since/until (doesn't work as expected for me) +*/ + + const fetchEvents = async (data: IEventFilter) => { try { @@ -98,5 +114,55 @@ export const useFetchEvents = () => { // setPool(pool); return h; } - return { fetchEvents, fetchEventsTools, setupSubscriptionNostr, pool } + + const runSubscriptionEvent = (pool: SimplePool, pubkey?: string, jobId?:string) => { + let poolRequest = pool.subscribeMany( + ASKELADD_RELAY, + [ + { + kinds: [KIND_JOB_REQUEST as NDKKind], + // since:timestampJob + // authors: pubkey ? [pubkey] : [] + }, + { + kinds: [KIND_JOB_RESULT as NDKKind], + // since:timestampJob + }, + ], + { + onevent(event) { + // if (event?.kind == KIND_JOB_REQUEST) { + // console.log("Event job request received: ", event?.id); + // if (!jobId) return; + // if (pubkey && event?.pubkey == pubkey) { + // setJobId(event?.id) + // } + // poolRequest.close(); + + // } + // if (event?.kind == KIND_JOB_RESULT) { + // console.log("Event job request received: ", event?.id); + // if (!jobId) return; + // if (pubkey && event?.pubkey == pubkey) { + // setJobId(event?.id) + // } + // poolRequest.close(); + // } + }, + onclose: () => { + poolRequest.close() + }, + oneose() { + poolRequest.close() + } + } + ) + } + + + return { + fetchEvents, fetchEventsTools, setupSubscriptionNostr, pool, runSubscriptionEvent, + + isWaitingJob, setIsWaitingJob, + } }; diff --git a/askeladd-dvm-marketplace/src/hooks/useSubmitJob.ts b/askeladd-dvm-marketplace/src/hooks/useSubmitJob.ts new file mode 100644 index 0000000..f6aa03b --- /dev/null +++ b/askeladd-dvm-marketplace/src/hooks/useSubmitJob.ts @@ -0,0 +1,26 @@ +import { ASKELADD_RELAY } from '@/constants/relay'; +import { useNostrContext } from '@/context/NostrContext'; +import { IGenerateZKPRequestDVM, KIND_JOB_REQUEST, KIND_JOB_RESULT, ProgramInternalContractName } from '@/types'; +import { NDKKind } from '@nostr-dev-kit/ndk'; +import { SimplePool, NostrEvent, Relay } from 'nostr-tools'; +import { useMemo, useState } from 'react'; +import { useFetchEvents } from './useFetchEvents'; +import { useDVMState } from './useDVMState'; + +export const useSubmitJob = () => { + const { ndk } = useNostrContext(); + const [pool, setPool] = useState(new SimplePool()) + + const { fetchEvents, fetchEventsTools, setupSubscriptionNostr } = useFetchEvents() + const { setIsWaitingJob, setJobId, fetchJobRequest, fetchEventsProof, jobId, eventIdRequest, + setPublicKey + } = useDVMState() + + + return { + setupSubscriptionNostr, + setJobId, + eventIdRequest, + // submitJob + } +}; diff --git a/askeladd-dvm-marketplace/src/types/index.ts b/askeladd-dvm-marketplace/src/types/index.ts index ae39cd4..0cd638b 100644 --- a/askeladd-dvm-marketplace/src/types/index.ts +++ b/askeladd-dvm-marketplace/src/types/index.ts @@ -3,6 +3,62 @@ export const KIND_JOB_REQUEST = 5600 export const KIND_JOB_ADD_PROGRAM = 5700 // check if not used +export enum ASKELADD_KINDS { + KIND_JOB_REQUEST = 5600, + 
KIND_JOB_RESULT = 6600, + KIND_JOB_LAUNCH_PROGRAM = 5700, + // KIND_SUBMIT_PROGRAM +} + +export enum ASKELADD_KINDS_NAME { + KIND_JOB_REQUEST = "Job request", + KIND_JOB_RESULT = "Job result", + KIND_SUBMIT_PROGRAM = "Submit result", +} + +export interface IProgramParams { + // Add to the see user Application profile NIP-0 metadata + // Also can be used to do a 1-1 DVM and force it. + pubkey_app?: string; + // Event id related to the NIP-89 and the JOB_LAUNCH_PROGRAM 5700 or other kind we can discuss + event_id?: string; + unique_id?: string; + inputs?: Map + inputs_types?: Map + inputs_encrypted?: Map + contract_reached?: ContractUploadType, + contract_name?: string; + internal_contract_name?: ProgramInternalContractName + +} +export interface IGenerateZKPRequestDVM { + request?: any; + program?: IProgramParams; +} + +export interface IFormRecommendedApplicationHandlerEvent { + +} + +export enum ConfigHandle { + SPECIFIC_KIND, + ALL_KIND +} + +export enum ProgramInternalContractName { + FibonacciProvingRequest = "FibonacciProvingRequest", + PoseidonProvingRequest = "PoseidonProvingRequest", + WideFibonacciProvingRequest = "WideFibonacciProvingRequest", + MultiFibonacciProvingRequest = "MultiFibonacciProvingRequest", + Custom = "Custom" +} + + +export enum ContractUploadType { + InternalAskeladd = "InternalAskeladd", + Ipfs = "Ipfs", +} + export interface JobResultProver { job_id: string; response: { @@ -45,62 +101,3 @@ export interface CommitmentSchemeProof { } }; } - -export enum ASKELADD_KINDS { - KIND_JOB_REQUEST = 5600, - KIND_JOB_RESULT = 6600, - KIND_JOB_LAUNCH_PROGRAM = 5700, - // KIND_SUBMIT_PROGRAM -} - -export enum ASKELADD_KINDS_NAME { - KIND_JOB_REQUEST = "Job request", - KIND_JOB_RESULT = "Job result", - KIND_SUBMIT_PROGRAM = "Submit result", -} -// export const ASKELADD_KINDS= { -// KIND_JOB_REQUEST, -// KIND_JOB_RESULT -// } - -export interface IFormRecommendedApplicationHandlerEvent { - -} - -export enum ConfigHandle { - SPECIFIC_KIND, - ALL_KIND -} - -export enum ProgramInternalContractName { - FibonnacciProvingRequest = "FibonnacciProvingRequest", - PoseidonProvingRequest = "PoseidonProvingRequest", - WideFibonnaciProvingRequest = "WideFibonnaciProvingRequest", - MultiFibonnacciProvingRequest = "MultiFibonnacciProvingRequest", - Custom = "Custom" -} - - -export enum ContractUploadType { - InternalAskeladd = "InternalAskeladd", - Ipfs = "Ipfs", -} - -export interface IProgramParams { - // Add to the see user Application profile NIP-0 metadata - // Also can be used to do a 1-1 DVM and force it. 
- pubkey_app?: string; - // Event id related to the NIP-89 and the JOB_LAUNCH_PROGRAM 5700 or other kind we can discuss - event_id?: string; - - unique_id?: string; - inputs?: Map - contract_reached?: ContractUploadType, - contract_name?: string; - internal_contract_name?: ProgramInternalContractName - -} -export interface IGenerateZKPRequestDVM { - request?: any; - program?: IProgramParams; -} \ No newline at end of file diff --git a/crates/cli/src/dvm_customer.rs b/crates/cli/src/dvm_customer.rs index 66324e6..17e1317 100644 --- a/crates/cli/src/dvm_customer.rs +++ b/crates/cli/src/dvm_customer.rs @@ -6,7 +6,7 @@ use std::time::Duration; use askeladd::config::Settings; use askeladd::dvm::customer::{Customer, CustomerError}; use askeladd::dvm::types::{ - ContractUploadType, FibonnacciProvingRequest, GenerateZKPJobRequest, PoseidonProvingRequest, + ContractUploadType, FibonacciProvingRequest, GenerateZKPJobRequest, PoseidonProvingRequest, ProgramInternalContractName, ProgramParams, }; use colored::*; @@ -58,7 +58,7 @@ async fn main() -> Result<(), Box> { map_inputs.insert("log_size".to_owned(), "5".to_owned()); map_inputs.insert("claim".to_owned(), "443693538".to_owned()); map_inputs.insert("output".to_owned(), "text/json".to_owned()); - let req_value = serde_json::to_value(FibonnacciProvingRequest { + let req_value = serde_json::to_value(FibonacciProvingRequest { log_size: 5, claim: 443693538, }) @@ -67,7 +67,7 @@ async fn main() -> Result<(), Box> { request: req_value, program: Some(ProgramParams { pubkey_application: None, - inputs: map_inputs, + inputs: Some(map_inputs), inputs_encrypted: None, inputs_types: None, unique_id: None, @@ -146,7 +146,7 @@ pub async fn poseidon_program(customer: Customer) -> Result<(), CustomerError> { let job_request = GenerateZKPJobRequest { request: req_value, program: Some(ProgramParams { - inputs: map_inputs, + inputs: Some(map_inputs), pubkey_application: None, inputs_encrypted: None, inputs_types: None, diff --git a/crates/core/src/db.rs b/crates/core/src/db.rs index 0b35fd9..f34d64e 100644 --- a/crates/core/src/db.rs +++ b/crates/core/src/db.rs @@ -44,8 +44,6 @@ impl Database { Ok(()) } - // pub fn insert_request(&self, job_id: &str, request: &FibonnacciProvingRequest) -> Result<()> - // { pub fn insert_request(&self, job_id: &str, request: &serde_json::Value) -> Result<()> { let request_json = serde_json::to_string(request).unwrap(); self.conn.execute( diff --git a/crates/core/src/dvm/customer.rs b/crates/core/src/dvm/customer.rs index cd73b0b..f414afb 100644 --- a/crates/core/src/dvm/customer.rs +++ b/crates/core/src/dvm/customer.rs @@ -11,6 +11,7 @@ use tokio::time::timeout; use crate::config::Settings; use crate::dvm::constants::*; use crate::dvm::types::{GenerateZKPJobRequest, GenerateZKPJobResult}; +use crate::nostr_utils::extract_params_from_tags; use crate::verifier_service::VerifierService; /// Represents a customer in the Askeladd system. 
@@ -84,7 +85,20 @@ impl Customer { let mut params_inputs: HashMap = HashMap::new(); let mut tags = vec![]; if let Some(p) = program { - params_inputs = p.inputs; + if let Some(inputs) = p.inputs { + params_inputs = inputs; + } else { + let successful_parses = extract_params_from_tags(&tags); + // let inputs_values:HashMap= successful_parses + // .into_iter() + // .map(|(k, v)| { + // let val:Value= serde_json::to_value(&v).unwrap(); + // // params_inputs.insert(k.clone(), val.clone()); + // return (k, val) + // }) + // .collect(); + params_inputs = successful_parses; + } } // OLD TAGS creation // let tags = vec![ diff --git a/crates/core/src/dvm/mod.rs b/crates/core/src/dvm/mod.rs index 5652d71..5639bdd 100644 --- a/crates/core/src/dvm/mod.rs +++ b/crates/core/src/dvm/mod.rs @@ -48,8 +48,8 @@ pub mod types { pub enum ProgramInternalContractName { FibonnacciProvingRequest, PoseidonProvingRequest, - WideFibonnaciProvingRequest, - MultiFibonnaciProvingRequest, + WideFibonacciProvingRequest, + MultiFibonacciProvingRequest, Custom(String), } @@ -59,15 +59,14 @@ pub mod types { pub unique_id: Option, pub pubkey_application: Option, /* Use for one to one marketplace => difficult * on the archi of the DVM */ - pub inputs: HashMap, + pub inputs: Option>, pub inputs_types: Option>, pub inputs_encrypted: Option>, pub contract_reached: ContractUploadType, pub contract_name: Option, pub internal_contract_name: Option, pub tags: Option>, - // For External program - // pub endpoint:Option, + // todo config payment and minimal sats } #[derive(Debug, Serialize, Deserialize)] @@ -80,7 +79,6 @@ pub mod types { #[derive(Debug, Serialize, Deserialize)] pub struct GenerateZKPJobResult { pub job_id: String, - // pub response: T, pub response: serde_json::Value, pub proof: StarkProof, } @@ -99,8 +97,6 @@ pub mod types { } } - /// Generic type for proving response - #[derive(Debug, Serialize, Deserialize)] pub struct GenericProvingResponse { pub response: Value, @@ -128,25 +124,25 @@ pub mod types { } #[derive(Debug, Serialize, Deserialize, Clone)] - pub struct FibonnacciProvingRequest { + pub struct FibonacciProvingRequest { pub log_size: u32, pub claim: u32, } #[derive(Debug, Serialize, Deserialize, Clone)] - pub struct MultiFibonnacciProvingRequest { + pub struct MultiFibonacciProvingRequest { pub log_sizes: Vec, pub claims: Vec, } #[derive(Debug, Serialize, Deserialize)] - pub struct FibonnacciProvingResponse { + pub struct FibonacciProvingResponse { pub log_size: u32, pub claim: u32, pub proof: StarkProof, } - impl FibonnacciProvingResponse { + impl FibonacciProvingResponse { pub fn new(log_size: u32, claim: u32, proof: StarkProof) -> Self { Self { log_size, @@ -156,7 +152,7 @@ pub mod types { } } - impl Clone for FibonnacciProvingResponse { + impl Clone for FibonacciProvingResponse { fn clone(&self) -> Self { // Temporarily use serde for a dirty clone // TODO: Implement a proper clone or find a better design that does not require cloning @@ -172,13 +168,13 @@ pub mod types { } #[derive(Debug, Serialize, Deserialize, Clone)] - pub struct WideFibonnacciProvingRequest { - pub log_fibonnacci_size: u32, + pub struct WideFibonacciProvingRequest { + pub log_fibonacci_size: u32, pub log_n_instances: u32, } #[derive(Debug, Serialize, Deserialize)] - pub struct WideFibonnacciProvingResponse { + pub struct WideFibonacciProvingResponse { pub log_size: u32, pub claim: u32, pub proof: StarkProof, @@ -187,8 +183,6 @@ pub mod types { #[derive(Debug, Serialize, Deserialize, Clone)] pub struct PoseidonProvingRequest { pub 
log_n_instances: u32, - // pub lookup_elements: stwo_prover::constraint_framework::logup::LookupElements, - // pub claimed_sum: stwo_prover::core::fields::qm31::SecureField, } #[derive(Debug, Serialize, Deserialize)] diff --git a/crates/core/src/dvm/service_provider.rs b/crates/core/src/dvm/service_provider.rs index 6b52713..90beb6a 100644 --- a/crates/core/src/dvm/service_provider.rs +++ b/crates/core/src/dvm/service_provider.rs @@ -1,3 +1,4 @@ +use std::collections::HashMap; // use std::collections::HashMap; use std::error::Error; @@ -177,51 +178,49 @@ impl ServiceProvider { async fn handle_event(&self, event: Box) -> Result<(), ServiceProviderError> { info!("Proving request received [{}]", event.id); + let tags = event.tags.clone(); let job_id = event.id.to_string(); - // let tags = &event.tags; - // let params = extract_params_from_tags(tags); - - let zkp_request = ServiceProvider::deserialize_zkp_request_data(&event.content.to_owned())?; - // println!("request value {:?}", request_value); + let zkp_request = + match ServiceProvider::deserialize_zkp_request_data(&event.content.to_owned()) { + Ok(zkp) => zkp, + Err(e) => { + println!("{:?}", e); + return Err(e); + } + }; println!("zkp_request {:?}", zkp_request); let params_program: Option = zkp_request.program.clone(); - let params_inputs; - // let mut successful_parses = HashMap::new(); - // let mut successful_parses; - + // let params_inputs= new HashMap() + let mut params_inputs: HashMap = HashMap::new(); // TODO Check strict if user have sent a good request if let Some(program_params) = params_program.clone() { println!("params_program {:?}", params_program); - - let successful_parses = convert_inputs_to_run_program(program_params.inputs); - // params_inputs = program_params.inputs.clone(); - params_inputs = successful_parses.clone(); - println!("params_inputs {:?}", params_inputs); + if let Some(inputs) = program_params.inputs { + let successful_parses = convert_inputs_to_run_program(inputs); + params_inputs = successful_parses.clone(); + println!("params_inputs {:?}", params_inputs); + } else { + let successful_parses = extract_params_from_tags(&tags); + successful_parses.into_iter().for_each(|(k, v)| { + let val: Value = serde_json::to_value(v).unwrap(); + params_inputs.insert(k.clone(), val.clone()); + }); + // let inputs_values:HashMap= successful_parses + // .into_iter() + // .map(|(k, v)| { + // let val:Value= serde_json::to_value(v).unwrap(); + // params_inputs.insert(k.clone(), val.clone()); + // return (k, val) + // }) + // .collect(); + // params_inputs = inputs_values; + } } else { println!("program_params {:?}", params_program); } - // for (key, value) in params_inputs.into_iter() { - // println!("{} / {}", key, value); - // let tag = Tag::parse(&["param", &key.to_owned(), &value.to_owned()]); - // tags.push(tag.unwrap()) - // // map.remove(key); - // } - - // let log_size = params - // .get("log_size") - // .and_then(|s| s.parse::().ok()) - // .unwrap(); - // let claim = params - // .get("claim") - // .and_then(|s| s.parse::().ok()) - // .unwrap(); - - // let request = FibonnacciProvingRequest { log_size, claim }; let request_str = serde_json::to_string(&zkp_request.request).unwrap(); - // let request_str = serde_json::to_string(&request).unwrap(); let request_value = serde_json::from_str(&request_str).unwrap(); - println!("request_str {:?}", request_str); if let Some(status) = self.db.get_request_status(&job_id)? 
{ @@ -255,7 +254,6 @@ impl ServiceProvider { let job_result = GenerateZKPJobResult { job_id: job_id.clone(), response: value_answer, - // response:serde_json::from_value(response.clone()).unwrap(), proof: response.proof, }; @@ -299,7 +297,6 @@ impl ServiceProvider { event: Box, ) -> Result<(), ServiceProviderError> { info!("LAUNCH_PROGRAM request received [{}]", event.id); - let job_id = event.id.to_string(); println!("job_id {:?}", job_id); @@ -307,19 +304,9 @@ impl ServiceProvider { let params = extract_params_from_tags(tags); println!("params {:?}", params); - - println!("event {:?}", event.content); - - // Deserialze content - // let zkp_request = - // ServiceProvider::deserialize_zkp_request_data(&event.content.to_owned())?; - // let params_program: Option = zkp_request.program.clone(); - // println!("zkp_request {:?}", zkp_request); - // Request on the content // Check request of the launch_program let request_str = serde_json::to_string(&event.content).unwrap(); - // let request_str = serde_json::to_string(&zkp_request.request).unwrap(); let request_value: Value = serde_json::from_str(&request_str).unwrap(); println!("request_value {:?}", request_value); @@ -328,6 +315,7 @@ impl ServiceProvider { let program_value: Value = serde_json::from_str(&request_str).unwrap(); println!("program_value {:?}", program_value); + // Deserialze content let zkp_request = match ServiceProvider::deserialize_zkp_request_data(&event.content.to_owned()) { Ok(zkp) => zkp, diff --git a/crates/core/src/prover_service.rs b/crates/core/src/prover_service.rs index ea5459a..d7ecb57 100644 --- a/crates/core/src/prover_service.rs +++ b/crates/core/src/prover_service.rs @@ -7,21 +7,21 @@ use stwo_prover::core::circle::M31_CIRCLE_LOG_ORDER; use stwo_prover::core::fields::m31::BaseField; use stwo_prover::core::prover::ProvingError; use stwo_prover::core::vcs::blake2_merkle::Blake2sMerkleHasher; -use stwo_wasm::fibonnaci::Fibonacci; +use stwo_wasm::fibonacci::Fibonacci; use stwo_wasm::poseidon::{PoseidonStruct, LOG_N_LANES, N_LOG_INSTANCES_PER_ROW}; -use stwo_wasm::wide_fibonnacci::WideFibStruct; +use stwo_wasm::wide_fibonacci::WideFibStruct; use thiserror::Error; use crate::dvm::types::{ ContractUploadType, - FibonnacciProvingRequest, - FibonnacciProvingResponse, + FibonacciProvingRequest, + FibonacciProvingResponse, GenericProvingResponse, PoseidonProvingRequest, ProgramInternalContractName, ProgramParams, - WideFibonnacciProvingRequest, - // MultiFibonnacciProvingRequest + WideFibonacciProvingRequest, + // MultiFibonacciProvingRequest }; // use stwo_wasm::fibonnaci::multi_fibonacci::MultiFibonacci; use crate::utils::convert_inputs_to_run_program; @@ -48,11 +48,11 @@ pub struct ProverService {} impl ProverService { pub fn generate_proof( &self, - request: FibonnacciProvingRequest, - ) -> Result { + request: FibonacciProvingRequest, + ) -> Result { let fib = Fibonacci::new(request.log_size, BaseField::from(request.claim)); match fib.prove() { - Ok(proof) => Ok(FibonnacciProvingResponse::new( + Ok(proof) => Ok(FibonacciProvingResponse::new( request.log_size, request.claim, proof, @@ -69,7 +69,9 @@ impl ProverService { println!("generate_proof_by_program type {:?}", request); let mut successful_parses = HashMap::new(); if let Some(program_params) = program_params.clone() { - successful_parses = convert_inputs_to_run_program(program_params.inputs); + if let Some(inputs) = program_params.inputs { + successful_parses = convert_inputs_to_run_program(inputs); + } } let serialized_request = 
serde_json::to_string(&successful_parses).unwrap(); // TODO @@ -137,7 +139,7 @@ impl ProverService { // Err(e) => Err(e.to_string()), // } } - ProgramInternalContractName::MultiFibonnaciProvingRequest => { + ProgramInternalContractName::MultiFibonacciProvingRequest => { println!("WIP FIX Multi Fibonnacci WASM"); Err(ProvingError::ConstraintsNotSatisfied.to_string()) @@ -231,17 +233,17 @@ impl ProverService { } // Err(ProvingError::ConstraintsNotSatisfied.to_string()) } - ProgramInternalContractName::WideFibonnaciProvingRequest => { + ProgramInternalContractName::WideFibonacciProvingRequest => { // Err(ProvingError::ConstraintsNotSatisfied.to_string()) - let wide_fib_serde: SerdeResult = + let wide_fib_serde: SerdeResult = serde_json::from_str(serialized_request); let wide_fib_req = match wide_fib_serde.as_ref() { Ok(req) => req.clone(), Err(e) => return Err(e.to_string()), }; let wide_fib = WideFibStruct::new( - wide_fib_req.log_fibonnacci_size, + wide_fib_req.log_fibonacci_size, wide_fib_req.log_n_instances, ); match wide_fib.prove::() { diff --git a/crates/core/src/utils.rs b/crates/core/src/utils.rs index 9b92ce9..0cd5506 100644 --- a/crates/core/src/utils.rs +++ b/crates/core/src/utils.rs @@ -36,54 +36,3 @@ pub fn convert_inputs_to_run_program( successful_parses } - -// pub fn deserialize_inputs<'de, D>(deserializer: D) -> Result, D::Error> -// where -// D: Deserializer<'de>, -// { -// let value = Value::deserialize(deserializer)?; -// if let Value::Object(map) = value { -// let result = map -// .into_iter() -// .map(|(k, v)| { -// v.as_str() -// .map(|s| (k, s.to_string())) -// .ok_or_else(|| serde::de::Error::custom("All values must be strings")) -// }) -// .collect(); -// result -// } else { -// Err(serde::de::Error::custom("inputs must be an object")) -// } -// } - -// fn deserialize_inputs<'de, D>(deserializer: D) -> Result, -// D::Error> where -// D: Deserializer<'de>, -// { -// let val: Value = Deserialize::deserialize(deserializer)?; -// match val { -// Value::Object(map) => map -// .into_iter() -// .map(|(k, v)| match v.as_str() { -// Some(str_val) => Ok((k, str_val.to_string())), -// None => Err(serde::de::Error::custom( -// "Expected a string value in the map", -// )), -// }) -// .collect(), -// _ => Err(serde::de::Error::custom("Expected a map for inputs")), -// } -// } - -// #[derive(Debug, Serialize, Deserialize, Clone)] -// pub struct GenerateZKPJobRequest { -// pub request: T, -// pub program: ProgramParams, -// } - -// impl GenerateZKPJobRequest { -// pub fn new(request: T, program: ProgramParams) -> Self { -// Self { request, program } -// } -// } diff --git a/crates/core/src/verifier_service.rs b/crates/core/src/verifier_service.rs index d9efd97..3022248 100644 --- a/crates/core/src/verifier_service.rs +++ b/crates/core/src/verifier_service.rs @@ -1,14 +1,14 @@ use serde::{Deserialize, Serialize}; use stwo_prover::core::fields::m31::BaseField; use stwo_prover::core::prover::VerificationError; -use stwo_wasm::fibonnaci::Fibonacci; +use stwo_wasm::fibonacci::Fibonacci; -use crate::dvm::types::{FibonnacciProvingResponse, GenericProvingResponse}; +use crate::dvm::types::{FibonacciProvingResponse, GenericProvingResponse}; // Define an enum to encapsulate possible deserialized types #[derive(Serialize, Deserialize, Debug)] #[serde(tag = "type")] enum ProgramType { - Fibonnacci(FibonnacciProvingResponse), + Fibonacci(FibonacciProvingResponse), Poseidon(GenericProvingResponse), Generic(GenericProvingResponse), } @@ -19,7 +19,7 @@ pub struct VerifierService {} impl 
VerifierService { pub fn verify_proof( &self, - response: FibonnacciProvingResponse, + response: FibonacciProvingResponse, ) -> Result<(), VerificationError> { let fib = Fibonacci::new(response.log_size, BaseField::from(response.claim)); fib.verify(response.proof) @@ -31,7 +31,7 @@ impl VerifierService { ) -> Result<(), VerificationError> { let data: ProgramType = serde_json::from_value(response).unwrap(); match data { - ProgramType::Fibonnacci(fib_answer) => { + ProgramType::Fibonacci(fib_answer) => { let fib = Fibonacci::new(fib_answer.log_size, BaseField::from(fib_answer.claim)); fib.verify(fib_answer.proof) } diff --git a/crates/stwo_wasm/src/fibonnaci/air.rs b/crates/stwo_wasm/src/fibonacci/air.rs similarity index 100% rename from crates/stwo_wasm/src/fibonnaci/air.rs rename to crates/stwo_wasm/src/fibonacci/air.rs diff --git a/crates/stwo_wasm/src/fibonnaci/component.rs b/crates/stwo_wasm/src/fibonacci/component.rs similarity index 100% rename from crates/stwo_wasm/src/fibonnaci/component.rs rename to crates/stwo_wasm/src/fibonacci/component.rs diff --git a/crates/stwo_wasm/src/fibonnaci/mod.rs b/crates/stwo_wasm/src/fibonacci/mod.rs similarity index 94% rename from crates/stwo_wasm/src/fibonnaci/mod.rs rename to crates/stwo_wasm/src/fibonacci/mod.rs index 7945676..da6be21 100644 --- a/crates/stwo_wasm/src/fibonnaci/mod.rs +++ b/crates/stwo_wasm/src/fibonacci/mod.rs @@ -7,7 +7,6 @@ pub mod multi_fibonacci; use air::FibonacciAir; use num_traits::One; -// use serde::{Deserialize, Serialize}; use stwo_prover::core::backend::cpu::CpuCircleEvaluation; use stwo_prover::core::backend::CpuBackend; use stwo_prover::core::channel::{Blake2sChannel, Channel}; @@ -31,15 +30,8 @@ use stwo_prover::trace_generation::{ // commit_and_prove, commit_and_verify, }; -// use stwo_prover::core::pcs::{ CommitmentSchemeVerifier}; - -// use stwo_prover::trace_generation::{commit_and_prove, commit_and_verify}; use wasm_bindgen::prelude::*; -// use num_traits::One; - -// use self::air::{FibonacciAir, MultiFibonacciAir}; - #[wasm_bindgen] extern "C" { // Use `js_namespace` here to bind `console.log(..)` instead of just @@ -91,26 +83,18 @@ impl Fibonacci { } pub fn prove(&self) -> Result, ProvingError> { - println!("channel"); - let channel = &mut Blake2sChannel::new(Blake2sHasher::hash(BaseField::into_slice(&[self .component .claim]))); - println!("twiddles"); let twiddles = CpuBackend::precompute_twiddles( CanonicCoset::new(self.component.log_size) .circle_domain() .half_coset, ); - println!("commitment_scheme"); let commitment_scheme = &mut CommitmentSchemeProver::new(LOG_BLOWUP_FACTOR, &twiddles); - println!("get trace"); - // let trace = self.get_trace(); - - println!("trace_domain"); // let trace_domain = CanonicCoset::new(self.component.log_size, self.component.claim); // let trace = trace diff --git a/crates/stwo_wasm/src/fibonnaci/multi_fibonacci.rs b/crates/stwo_wasm/src/fibonacci/multi_fibonacci.rs similarity index 55% rename from crates/stwo_wasm/src/fibonnaci/multi_fibonacci.rs rename to crates/stwo_wasm/src/fibonacci/multi_fibonacci.rs index e35ea31..9ef1555 100644 --- a/crates/stwo_wasm/src/fibonnaci/multi_fibonacci.rs +++ b/crates/stwo_wasm/src/fibonacci/multi_fibonacci.rs @@ -9,13 +9,9 @@ use stwo_prover::core::poly::BitReversedOrder; use stwo_prover::core::prover::{ProvingError, StarkProof, VerificationError}; use stwo_prover::core::vcs::blake2_merkle::Blake2sMerkleHasher; use stwo_prover::examples::wide_fibonacci::component::WideFibComponent; -// use 
stwo_prover::examples::fibonacci::MultiFibonacci; -// use stwo_prover::core::vcs::blake2_hash::Blake2sHasher; -// use stwo_prover::core::channel::{Blake2sChannel, Channel}; -// use stwo_prover::core::fields::IntoSlice; use wasm_bindgen::prelude::*; -use crate::fibonnaci::Fibonacci; +use crate::fibonacci::Fibonacci; use crate::StwoResult; #[wasm_bindgen] @@ -61,24 +57,16 @@ impl MultiFibonacci { }) .collect() } + // TODO finish implement prove pub fn prove(&self) -> Result, ProvingError> { println!("try proof of multi fibo"); - - // let channel = - // &mut Blake2sChannel::new(Blake2sHasher::hash(BaseField::into_slice(&self.claims))); - // let trace = self.get_trace(); Err(ProvingError::ConstraintsNotSatisfied) } - + // TODO finish implement verify pub fn verify(&self, proof: StarkProof) -> Result<(), VerificationError> { - // println!("try verify proof of multi fibo"); println!("try verify proof of multi fibo"); println!("stark proof {:?}", proof); - // println!("stark proof {:?}", proof.commitment_scheme_proof.proof_of_work.nonce); - // let channel = - // &mut Blake2sChannel::new(Blake2sHasher::hash(BaseField::into_slice(&self.claims))); - // commit_and_verify(proof, &self, channel) Err(VerificationError::OodsNotMatching) } } @@ -121,80 +109,7 @@ pub fn stark_proof_multi_fibo(log_sizes: Vec, claims_int: Vec) -> Stwo } } -// #[wasm_bindgen] -// pub fn stark_proof_multi_fibo(log_sizes: Vec, claims_int: Vec) -> StwoResult { -// let claims: Vec = claims_int -// .into_iter() -// .map(m31::M31::from_u32_unchecked) -// .collect(); -// let multi_fibo = MultiFibonacci::new(log_sizes, claims); - -// match multi_fibo.prove() { -// Ok(proof) => { -// console_log!("Proof deserialized successfully"); -// match multi_fibo.verify(proof) { -// Ok(()) => { -// console_log!("Proof verified successfully"); -// StwoResult { -// success: true, -// message: "Proof verified successfully".to_string(), -// } -// } -// Err(e) => { -// console_log!("Proof verification failed: {:?}", e); -// StwoResult { -// success: false, -// message: format!("Proof verification failed: {:?}", e), -// } -// } -// } -// } -// Err(e) => { -// console_log!("Failed to deserialize proof: {:?}", e); -// StwoResult { -// success: false, -// message: format!("Failed to deserialize proof: {:?}", e), -// } -// } -// } -// } - -// #[wasm_bindgen] -// pub fn verify_stark_proof_multi_fibo( -// log_sizes: Vec, -// claims_int: Vec, -// stark_proof_str: &str, -// ) -> StwoResult { -// let claims: Vec = claims_int -// .into_iter() -// .map(m31::M31::from_u32_unchecked) -// .collect(); -// let multi_fibo = MultiFibonacci::new(log_sizes, claims); -// // StwoResult { -// // success: false, -// // message: format!("Proof verification failed: {:?}", "no generic value"), -// // } -// let stark_proof: Result, serde_json::Error> = -// serde_json::from_str(stark_proof_str); -// match multi_fibo.verify(stark_proof.unwrap()) { -// Ok(()) => { -// console_log!("Proof verified successfully"); -// StwoResult { -// success: true, -// message: "Proof verified successfully".to_string(), -// } -// } -// Err(e) => { -// console_log!("Proof verification failed: {:?}", e); -// StwoResult { -// success: false, -// message: format!("Proof verification failed: {:?}", e), -// } -// } -// } -// } - -// #[wasm_bindgen] +#[wasm_bindgen] pub fn verify_stark_proof_multi_fibo( log_sizes: Vec, claims_int: Vec, @@ -205,10 +120,6 @@ pub fn verify_stark_proof_multi_fibo( .map(m31::M31::from_u32_unchecked) .collect(); let multi_fibo = MultiFibonacci::new(log_sizes, claims); - // 
StwoResult { - // success: false, - // message: format!("Proof verification failed: {:?}", "no generic value"), - // } let stark_proof: Result, serde_json::Error> = serde_json::from_str(stark_proof_str); match multi_fibo.verify(stark_proof.unwrap()) { diff --git a/crates/stwo_wasm/src/lib.rs b/crates/stwo_wasm/src/lib.rs index d4f6127..c8b34e4 100644 --- a/crates/stwo_wasm/src/lib.rs +++ b/crates/stwo_wasm/src/lib.rs @@ -1,11 +1,9 @@ // lib.rs pub mod poseidon; -pub mod wide_fibonnacci; +pub mod wide_fibonacci; // Deprecated program examples on the STWO // Recreate it internally -pub mod fibonnaci; -// pub mod multi_fibonacci; - +pub mod fibonacci; use poseidon::PoseidonStruct; use serde::{Deserialize, Serialize}; use stwo_prover::core::prover::StarkProof; @@ -46,15 +44,12 @@ impl StwoResult { } #[wasm_bindgen] -// pub fn prove_and_verify(log_size: u32, claim: u32) -> StwoResult { pub fn prove_and_verify(log_n_instances: u32) -> StwoResult { console_log!( "Starting prove_and_verify with log_n_instances: {}", log_n_instances, ); - let poseidon = PoseidonStruct::new(log_n_instances); - match poseidon { Err(e) => StwoResult { success: false, diff --git a/crates/stwo_wasm/src/wide_fibonnacci.rs b/crates/stwo_wasm/src/wide_fibonacci.rs similarity index 97% rename from crates/stwo_wasm/src/wide_fibonnacci.rs rename to crates/stwo_wasm/src/wide_fibonacci.rs index 09a1db5..c380397 100644 --- a/crates/stwo_wasm/src/wide_fibonnacci.rs +++ b/crates/stwo_wasm/src/wide_fibonacci.rs @@ -61,10 +61,8 @@ impl WideFibStruct { .map(|i| Input { a: m31::M31::from_u32_unchecked(i), b: m31::M31::from_u32_unchecked(i), - // b: m31!(i), }) .collect(); - // let trace = wide_fib.get_trace(); let trace = gen_trace(&self.air.component.clone(), private_input); let trace_domain = CanonicCoset::new(self.air.component.log_column_size()); let trace = trace @@ -73,9 +71,12 @@ impl WideFibStruct { .collect(); let prover_channel = &mut Blake2sChannel::new(Blake2sHasher::hash(BaseField::into_slice(&[]))); - // let res_proof = commit_and_prove::(&self.air, prover_channel, trace); let res_proof: Result, ProvingError> = commit_and_prove(&self.air, prover_channel, trace); + match res_proof { + Ok(r) => Ok(r), + Err(e) => Err(e), + } // let res_proof = prove( // &self.air, // prover_channel, @@ -83,11 +84,6 @@ impl WideFibStruct { // None, // ) // .map_err(|op| Err::, ProvingError>(op)); - - match res_proof { - Ok(r) => Ok(r), - Err(e) => Err(e), - } // res_proof }
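Note on the recurring change in the hunks above: ProgramParams.inputs becomes optional (a HashMap of string keys and values, judging from the CLI usage), and both customer.rs and service_provider.rs fall back to extracting ["param", key, value] entries from the event tags via extract_params_from_tags when the inputs map is absent. The snippet below is only an illustrative sketch of that fallback, not code from this patch: it models tags as plain Vec<Vec<String>> rather than the real nostr Tag type, and the resolve_inputs helper name is hypothetical.

use std::collections::HashMap;

// Sketch of the inputs-or-tags fallback (simplified, assumed types).
fn resolve_inputs(
    inputs: Option<HashMap<String, String>>,
    tags: &[Vec<String>],
) -> HashMap<String, String> {
    match inputs {
        // Preferred path: the job request carried an explicit inputs map.
        Some(map) => map,
        // Fallback: recover ["param", key, value] entries from the event tags.
        None => tags
            .iter()
            .filter(|t| t.len() == 3 && t[0] == "param")
            .map(|t| (t[1].clone(), t[2].clone()))
            .collect(),
    }
}

fn main() {
    let tags = vec![vec!["param".to_string(), "log_size".to_string(), "5".to_string()]];
    let params = resolve_inputs(None, &tags);
    assert_eq!(params.get("log_size").map(String::as_str), Some("5"));
}

With this shape, a request that serializes its inputs in the content and a request that only carries "param" tags resolve to the same parameter map before being handed to the prover.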