Skip to content

Commit

Permalink
Merge branch 'main' of https://github.com/oceanprotocol/ocean-cli int…
Browse files Browse the repository at this point in the history
…o issue-70-interactive-publish-flow
  • Loading branch information
jamiehewitt15 committed Oct 15, 2024
2 parents 49be122 + 4c9c5de commit f2f5076
Show file tree
Hide file tree
Showing 5 changed files with 163 additions and 11 deletions.
14 changes: 14 additions & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
"@typescript-eslint/eslint-plugin": "^5.60.1",
"@typescript-eslint/parser": "^5.60.1",
"chai": "^4.3.7",
"crypto": "^1.0.1",
"eslint": "^8.44.0",
"eslint-config-oceanprotocol": "^2.0.4",
"eslint-config-prettier": "^8.8.0",
Expand Down
11 changes: 10 additions & 1 deletion src/commands.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,14 @@ import {
updateAssetMetadata,
downloadFile,
isOrderable,
getMetadataURI,
} from "./helpers";
import {
Aquarius,
Asset,
ComputeAlgorithm,
ComputeJob,
ComputeOutput,
Config,
ConfigHelper,
Datatoken,
Expand Down Expand Up @@ -237,6 +239,7 @@ export class Commands {
}

public async computeStart(args: string[]) {

const inputDatasetsString = args[1];
let inputDatasets = [];

Expand Down Expand Up @@ -408,14 +411,20 @@ export class Commands {
" with additional datasets:" +
(!additionalDatasets ? "none" : additionalDatasets[0].documentId)
);

const output: ComputeOutput = {
metadataUri: await getMetadataURI()
}

const computeJobs = await ProviderInstance.computeStart(
providerURI,
this.signer,
computeEnv.id,
assets[0],
algo,
null,
additionalDatasets
additionalDatasets,
output
);
if (computeJobs && computeJobs[0]) {
const { jobId, agreementId } = computeJobs[0];
Expand Down
64 changes: 64 additions & 0 deletions src/helpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -370,3 +370,67 @@ export async function isOrderable(
}
return true;
}


// The ranges and the number of usable IPs:

// 10.0.0.0 - 10.255.255.255 Addresses: 16,777,216
// 172.16.0.0 - 172.31.255.255 Addresses: 1,048,576
// 192.168.0.0 - 192.168.255.255 Addresses: 65,536

/**
 * Check whether a host string is a private or loopback address.
 * Covers 127.x loopback, IPv6 loopback forms like "::1", the literal
 * "localhost", and the RFC 1918 private IPv4 ranges:
 *   10.0.0.0    - 10.255.255.255
 *   172.16.0.0  - 172.31.255.255
 *   192.168.0.0 - 192.168.255.255
 *
 * @param ip - hostname or IP address string (was implicitly `any`)
 * @returns true when the address is private or loopback
 */
export function isPrivateIP(ip: string): boolean {
  // Loopback: 127.x.x.x, strings of 0s/colons ending in 1 (e.g. "::1"),
  // or the literal "localhost"
  const loopbackRe = /^(127\.[\d.]+|[0:]+1|localhost)$/;
  if (ip.match(loopbackRe) !== null) {
    return true;
  }
  // Private IPv4 ranges (RFC 1918)
  const parts = ip.split('.');
  if (parts[0] === '10') {
    return true;
  }
  if (parts[0] === '172') {
    const secondOctet = parseInt(parts[1], 10);
    return secondOctet >= 16 && secondOctet <= 31;
  }
  return parts[0] === '192' && parts[1] === '168';
}

/**
 * Look up this machine's public IP address using the free ipify API.
 *
 * @returns the public IPv4 address, or null when the lookup fails
 *          (network error, unexpected response). The return type was
 *          previously declared Promise<string> while returning null.
 */
export async function getPublicIP(): Promise<string | null> {
  try {
    const response = await fetch('https://api.ipify.org?format=json')
    const data = await response.json()
    if (data) {
      return data.ip
    }
  } catch (err) {
    // Best-effort: log and fall through to null so callers can fall back
    // to the original URI. (Fixed typo: "Erro" -> "Error".)
    console.error('Error getting public IP: ', err instanceof Error ? err.message : err)
  }

  return null
}

/**
 * Resolve the metadata URI from AQUARIUS_URL, replacing a private or
 * loopback host with this machine's public IP so that remote services
 * (e.g. a compute provider) can reach the Aquarius instance.
 *
 * @returns the rewritten "protocol//host:port" string, the bare public IP
 *          when the URL has no port, or the original AQUARIUS_URL when the
 *          host is already public or the public-IP lookup fails
 * @throws Error when AQUARIUS_URL is unset (previously an opaque
 *         "Invalid URL" TypeError from the URL constructor)
 */
export async function getMetadataURI(): Promise<string> {
  const metadataURI = process.env.AQUARIUS_URL
  if (!metadataURI) {
    throw new Error('AQUARIUS_URL environment variable is not set')
  }
  const parsed = new URL(metadataURI);
  let ip = metadataURI // by default, return the URI untouched
  // has port number?
  const hasPort = parsed.port && !isNaN(Number(parsed.port))
  if (hasPort) {
    // remove the port, just get the host part
    ip = parsed.hostname
  }
  // check if host is private or loopback
  if (isPrivateIP(ip)) {
    // get public V4 ip address
    ip = await getPublicIP()
    if (!ip) {
      // lookup failed — fall back to the original URI
      return metadataURI
    }
  }
  // if we removed the port, add it back — preserving the original protocol
  // (previously hardcoded "http://", silently downgrading https URLs)
  if (hasPort) {
    ip = `${parsed.protocol}//${ip}:${parsed.port}`
  }
  return ip
}

84 changes: 74 additions & 10 deletions test/consumeFlow.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,43 @@ import { expect } from "chai";
import { exec } from "child_process";
import path from "path";
import fs from "fs";
import crypto from "crypto";
import https from "https";

describe("Ocean CLI Publishing", function() {
this.timeout(60000); // Set a longer timeout to allow the command to execute
this.timeout(180000); // Set a longer timeout to allow the command to execute

let downloadDatasetDid: string
let downloadDatasetDid: string;
let computeDatasetDid: string;
let jsAlgoDid: string;
let pythonAlgoDid: string;

const projectRoot = path.resolve(__dirname, "..");

// Compute the SHA-256 hex digest of a file's contents
const computeFileHash = (filePath: string): string => {
  const contents = fs.readFileSync(filePath);
  return crypto.createHash('sha256').update(contents).digest('hex');
};

// Download a remote file over HTTPS into `dest`.
// Resolves once the file is fully flushed to disk; on a request error,
// removes the partial file and rejects with the underlying error.
const downloadFile = async (url: string, dest: string): Promise<void> => {
  return new Promise((resolve, reject) => {
    const output = fs.createWriteStream(dest);
    const request = https.get(url, (response) => {
      response.pipe(output);
      output.on('finish', () => {
        output.close(() => resolve());
      });
    });
    request.on('error', (err) => {
      fs.unlink(dest, () => reject(err));
    });
  });
};



it("should publish a dataset using 'npm run cli publish'", function(done) {
const metadataFile = path.resolve(projectRoot, "metadata/simpleDownloadDataset.json");

Expand All @@ -35,8 +61,9 @@ describe("Ocean CLI Publishing", function() {
downloadDatasetDid = match[0];
}
expect(stdout).to.contain("Asset published. ID:");
done();
done()
} catch (assertionError) {
console.log('assertionError', assertionError);
done(assertionError);
}
});
Expand All @@ -58,7 +85,7 @@ describe("Ocean CLI Publishing", function() {
computeDatasetDid = match[0];
}
expect(stdout).to.contain("Asset published. ID:");
done();
done()
} catch (assertionError) {
done(assertionError);
}
Expand All @@ -81,7 +108,7 @@ describe("Ocean CLI Publishing", function() {
if (match) {
jsAlgoDid = match[0];
}
done();
done()
} catch (assertionError) {
done(assertionError);
}
Expand All @@ -104,7 +131,7 @@ describe("Ocean CLI Publishing", function() {
if (match) {
pythonAlgoDid = match[0];
}
done();
done()
} catch (assertionError) {
done(assertionError);
}
Expand All @@ -113,15 +140,13 @@ describe("Ocean CLI Publishing", function() {

it("should get DDO using 'npm run cli getDDO' for download dataset", function(done) {
  exec(`npm run cli getDDO ${downloadDatasetDid}`, { cwd: projectRoot }, (error, stdout) => {
    try {
      // The DDO should echo the DID, use the DID context, and list a datatoken.
      expect(stdout).to.contain(`${downloadDatasetDid}`);
      expect(stdout).to.contain("https://w3id.org/did/v1");
      expect(stdout).to.contain("Datatoken");
      done();
    } catch (assertionError) {
      // Route assertion failures through done() so mocha reports them,
      // matching the pattern used by the publish tests above; also removed
      // a leftover debug console.log of stdout.
      done(assertionError);
    }
  });
});


it("should get DDO using 'npm run cli getDDO' for compute dataset", function(done) {
exec(`npm run cli getDDO ${computeDatasetDid}`, { cwd: projectRoot }, (error, stdout) => {
expect(stdout).to.contain(`${computeDatasetDid}`);
Expand All @@ -130,7 +155,7 @@ describe("Ocean CLI Publishing", function() {
done()
});
});

it("should get DDO using 'npm run cli getDDO' for JS algorithm", function(done) {
exec(`npm run cli getDDO ${jsAlgoDid}`, { cwd: projectRoot }, (error, stdout) => {
expect(stdout).to.contain(`${jsAlgoDid}`);
Expand All @@ -139,7 +164,7 @@ describe("Ocean CLI Publishing", function() {
done()
});
});

it("should get DDO using 'npm run cli getDDO' for python algorithm", function(done) {
exec(`npm run cli getDDO ${pythonAlgoDid}`, { cwd: projectRoot }, (error, stdout) => {
expect(stdout).to.contain(`${pythonAlgoDid}`);
Expand All @@ -149,4 +174,43 @@ describe("Ocean CLI Publishing", function() {
});
});

it("should download the download dataset", function(done) {
  // NOTE(review): this test previously called this.timeout(10000), which
  // LOWERED the suite-level 180000 ms timeout for the one test that performs
  // a large remote download — a likely source of flaky failures. Rely on the
  // suite timeout instead.
  (async () => {
    try {
      // Run the CLI download command and capture its stdout.
      const { stdout } = await new Promise<{ stdout: string, error: Error | null }>((resolve, reject) => {
        exec(`npm run cli download ${downloadDatasetDid} .`, { cwd: projectRoot }, (error, stdout) => {
          if (error) {
            reject(error);
          } else {
            resolve({ stdout, error: null });
          }
        });
      });

      expect(stdout).to.contain("File downloaded successfully");

      // Path to the downloaded file — relative to the process cwd, which is
      // presumably projectRoot; TODO confirm against the CLI's output dir.
      const downloadedFilePath = './enwiki-latest-abstract10.xml.gz-rss.xml';

      // Verify the downloaded file content hash matches the original file hash.
      const downloadedFileHash = computeFileHash(downloadedFilePath);
      const originalFilePath = './metadata/enwiki-latest-abstract10.xml.gz-rss.xml';

      await downloadFile("https://dumps.wikimedia.org/enwiki/latest/enwiki-latest-abstract10.xml.gz-rss.xml", originalFilePath);
      const originalFileHash = computeFileHash(originalFilePath);

      expect(downloadedFileHash).to.equal(originalFileHash);

      // Clean up the reference copy fetched for comparison.
      fs.unlinkSync(originalFilePath);

      done();
    } catch (err) {
      done(err);
    }
  })();
});

});

0 comments on commit f2f5076

Please sign in to comment.