Bulk collection update implementation #23

Open · wants to merge 2 commits into main
Changes from all commits
2 changes: 1 addition & 1 deletion .env.example
@@ -23,4 +23,4 @@ AWS_SECRET_ACCESS_KEY=
 NODE_TLS_REJECT_UNAUTHORIZED=1
 SKIP_MODERATION=true

-DEBUG=true
\ No newline at end of file
+DEBUG=true
127 changes: 70 additions & 57 deletions constants.js
@@ -1,70 +1,83 @@
-const TESTNET = 'reef_testnet';
-const MAINNET = 'reef_mainnet';
+const TESTNET = "reef_testnet";
+const MAINNET = "reef_mainnet";

 const networks = {
-  [TESTNET]: {
-    rpc: 'wss://rpc-testnet.reefscan.com/ws',
-    contracts: {
-      marketplace: '0x31939DF5c6A5ac0b574EDE6E610Fd30c08788A53',
-      erc1155: '0x9FdEb478A27E216f80DaEE0967dc426338eD02f2',
-      utility: '0x8E7Ef6bD28cD9bDb6DBf105140958ac03EeC371A',
-      multicall: '0x399d847d3D8F6b9F9A30e4Dc9C89F65a4EF65821'
-    },
-    typesense: {
-      collections: {
-        collections: 'testnet_collections',
-        users: 'testnet_users',
-        collectibles: 'testnet_collectibles',
-      },
-      host: 'search.sqwid.app'
-    },
-    useCache: false,
-    graphql_api_explorer: 'https://squid.subsquid.io/reef-explorer-testnet/graphql',
-    graphql_api_marketplace: 'https://squid.subsquid.io/sqwid-marketplace-testnet/graphql',
-  },
-  [MAINNET]: {
-    rpc: 'wss://rpc.reefscan.com/ws',
-    contracts: {
-      marketplace: "0xB13Be9656B243600C86922708C20606f5EA89218",
-      erc1155: "0x0601202b75C96A61CDb9A99D4e2285E43c6e60e4",
-      utility: "0xffb12A5f69AFBD58Dc49b4AE9044D8F20D131733",
-      multicall: "0x137A7237e2a5f7f2eEE1C1471fbb26d0be8Fcc60"
-    },
-    typesense: {
-      collections: {
-        collections: 'mainnet_collections',
-        users: 'mainnet_users',
-        collectibles: 'mainnet_collectibles',
-      },
-      host: 'search.sqwid.app'
-    },
-    useCache: false,
-    graphql_api_explorer: 'https://squid.subsquid.io/reef-explorer/graphql',
-    graphql_api_marketplace: 'https://squid.subsquid.io/sqwid-marketplace/graphql',
-  }
-}
+  [TESTNET]: {
+    rpc: "wss://rpc-testnet.reefscan.com/ws",
+    contracts: {
+      marketplace: "0x614b7B6382524C32dDF4ff1f4187Bc0BAAC1ed11",
+      erc1155: "0x9b9a32c56c8F5C131000Acb420734882Cc601d39",
+      utility: "0xEf1c5ad26cE1B42315113C3561B4b2abA0Ba64B3",
+      multicall: "0x399d847d3D8F6b9F9A30e4Dc9C89F65a4EF65821",
+    },
+    typesense: {
+      collections: {
+        collections: "testnet_collections",
+        users: "testnet_users",
+        collectibles: "testnet_collectibles",
+      },
+      host: "search.sqwid.app",
+    },
+    useCache: false,
+    graphql_api_explorer:
+      "https://squid.subsquid.io/reef-explorer-testnet/graphql",
+    graphql_api_marketplace:
+      "https://reef.squids.live/sqwid-marketplace-testnet:prod/api/graphql",
+  },
+  [MAINNET]: {
+    rpc: "wss://rpc.reefscan.com/ws",
+    contracts: {
+      marketplace: "0xB13Be9656B243600C86922708C20606f5EA89218",
+      erc1155: "0x0601202b75C96A61CDb9A99D4e2285E43c6e60e4",
+      utility: "0xffb12A5f69AFBD58Dc49b4AE9044D8F20D131733",
+      multicall: "0x137A7237e2a5f7f2eEE1C1471fbb26d0be8Fcc60",
+    },
+    typesense: {
+      collections: {
+        collections: "mainnet_collections",
+        users: "mainnet_users",
+        collectibles: "mainnet_collectibles",
+      },
+      host: "search.sqwid.app",
+    },
+    useCache: false,
+    graphql_api_explorer: "https://squid.subsquid.io/reef-explorer/graphql",
+    graphql_api_marketplace:
+      "https://squid.subsquid.io/sqwid-marketplace/graphql",
+  },
+};

 const TEMP_PATH = "./temp-uploads/";

 const config = {
-  testnet: {
-    marketplaceContractAddress: '0x31939DF5c6A5ac0b574EDE6E610Fd30c08788A53',
-    nftContractAddress: '0x9FdEb478A27E216f80DaEE0967dc426338eD02f2',
-    explorerGraphqlUrl: 'https://squid.subsquid.io/reef-explorer-testnet/graphql',
-    marketplaceGraphqlUrl: 'https://squid.subsquid.io/sqwid-marketplace-testnet/graphql',
-  },
-  mainnet: {
-    marketplaceContractAddress: '0xB13Be9656B243600C86922708C20606f5EA89218',
-    nftContractAddress: '0x0601202b75C96A61CDb9A99D4e2285E43c6e60e4',
-    explorerGraphqlUrl: 'https://squid.subsquid.io/reef-explorer/graphql',
-    marketplaceGraphqlUrl: 'https://squid.subsquid.io/sqwid-marketplace/graphql',
-  }
-}
+  testnet: {
+    marketplaceContractAddress: "0x614b7B6382524C32dDF4ff1f4187Bc0BAAC1ed11",
+    nftContractAddress: "0x9b9a32c56c8F5C131000Acb420734882Cc601d39",
+    explorerGraphqlUrl:
+      "https://squid.subsquid.io/reef-explorer-testnet/graphql",
+    marketplaceGraphqlUrl:
+      "https://reef.squids.live/sqwid-marketplace-testnet:prod/api/graphql",
+  },
+  mainnet: {
+    marketplaceContractAddress: "0xB13Be9656B243600C86922708C20606f5EA89218",
+    nftContractAddress: "0x0601202b75C96A61CDb9A99D4e2285E43c6e60e4",
+    explorerGraphqlUrl: "https://squid.subsquid.io/reef-explorer/graphql",
+    marketplaceGraphqlUrl:
+      "https://squid.subsquid.io/sqwid-marketplace/graphql",
+  },
+};

-const envNetwork = Object.keys(networks).find((prop)=>prop===process.env.NETWORK);
+const envNetwork = Object.keys(networks).find(
+  (prop) => prop === process.env.NETWORK
+);

 if (!envNetwork) {
-  throw new Error('process.env.NETWORK value='+ process.env.NETWORK +' not a property name of '+ Object.keys(networks));
+  throw new Error(
+    "process.env.NETWORK value=" +
+      process.env.NETWORK +
+      " not a property name of " +
+      Object.keys(networks)
+  );
 }

 const moderators = process.env.MODERATORS?.split(',')??[];
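Note for reviewers: a minimal, self-contained sketch of how the network lookup above behaves at startup. Everything here is hypothetical repro code, not part of the diff; the module's actual exports are not shown in this PR.

const networks = {
  reef_testnet: { rpc: "wss://rpc-testnet.reefscan.com/ws" },
  reef_mainnet: { rpc: "wss://rpc.reefscan.com/ws" },
};

process.env.NETWORK = "reef_testnet"; // normally set via .env

const envNetwork = Object.keys(networks).find(
  (prop) => prop === process.env.NETWORK
);
if (!envNetwork) {
  // Mirrors the guard in constants.js: fail fast on an unknown network name
  throw new Error(`Unknown NETWORK: ${process.env.NETWORK}`);
}

console.log(networks[envNetwork].rpc); // wss://rpc-testnet.reefscan.com/ws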
1 change: 1 addition & 0 deletions index.js
@@ -57,6 +57,7 @@ app.use (helmet ());
 if (process.env.ENABLE_CORS==='true') {
   const cors = require ('cors');
   app.use (cors ());
+
 }
 app.use (express.json ({ limit: "100mb" }));
 app.use (express.urlencoded ({extended: true, limit: "100mb"}));
226 changes: 163 additions & 63 deletions lib/collection.js
@@ -1,77 +1,177 @@
-const sharp = require('sharp');
-const { getEVMAddress } = require('./getEVMAddress');
-const { initIpfs } = require('./IPFS');
-const firebase = require('./firebase');
+const sharp = require("sharp");
+const { getEVMAddress } = require("./getEVMAddress");
+const { initIpfs } = require("./IPFS");
+const firebase = require("./firebase");
+const { TEMP_PATH } = require("../constants");
+const fs = require("fs");
+const { create, globSource } = require("ipfs-http-client");
+// const fs = require("fs");
+const path = require("path");

-const generateLogo = async file => {
-  const data = await sharp(file)
-    .resize({
-      width: 128,
-      height: 128,
-      fit: sharp.fit.inside,
-      withoutEnlargement: true
-    })
-    .webp ()
-    .toBuffer ();
-  return data;
-}
+const generateLogo = async (file) => {
+  const data = await sharp(file)
+    .resize({
+      width: 128,
+      height: 128,
+      fit: sharp.fit.inside,
+      withoutEnlargement: true,
+    })
+    .webp()
+    .toBuffer();
+  return data;
+};

-const generateThumbnail = async file => {
-  const data = await sharp (file)
-    .resize ({
-      width: 512,
-      height: 512,
-      fit: sharp.fit.inside,
-      withoutEnlargement: true
-    })
-    .webp ()
-    .toBuffer ();
-  return data;
-}
+const generateThumbnail = async (file) => {
+  const data = await sharp(file)
+    .resize({
+      width: 512,
+      height: 512,
+      fit: sharp.fit.inside,
+      withoutEnlargement: true,
+    })
+    .webp()
+    .toBuffer();
+  return data;
+};

-// const uploadToIPFS = async file => {
-//   const ipfs = initIpfs();
-//   console.log(file)
-//   // const buffer = file.arrayBuffer ? await file.arrayBuffer() : file;
-//   // const addedFile = await ipfs.add(buffer);
-//   // await ipfs.pin.add (addedFile.path);
-//   // return addedFile.path;
-//   return "";
-// }
+// old pandey code
 const uploadToIPFS = async (buffer) => {
   try {
     const ipfs = initIpfs();
     const { cid } = await ipfs.add(buffer);
     return cid.toString();
   } catch (error) {
-    console.error('Error uploading to IPFS:', error);
+    console.error("Error uploading to IPFS:", error);
   }
 };

+const uploadBulkToIPFS = async (file) => {
+  const ipfs = initIpfs();
+  const buffer = file.arrayBuffer ? await file.arrayBuffer() : file;
+  const addedFile = await ipfs.add(buffer);
+  await ipfs.pin.add(addedFile.path);
+  return addedFile.path;
+};
+
+const uploadBulkCollectionToIPFS = async (dirPath) => {
+  try {
+    if (!fs.existsSync(dirPath)) {
+      throw new Error(`Directory does not exist: ${dirPath}`);
+    }
+
+    const files = fs.readdirSync(dirPath);
+    if (files.length === 0) {
+      throw new Error(`No files found in directory: ${dirPath}`);
+    }
+
+    const ipfs = initIpfs();
+    const addedFiles = [];
+
+    // Process each file as a readable stream and upload to IPFS
+    for (const file of files) {
+      const filePath = path.join(dirPath, file);
+
+      const fileStream = fs.createReadStream(filePath); // Create a readable stream
+
+      // Add the stream to IPFS
+      const result = await ipfs.add({ content: fileStream, path: file });
+      addedFiles.push({
+        path: file,
+        cid: result.cid.toString(),
+      });
+    }
+
+    // Format files with `ipfs://` URI scheme
+    const crAddedFiles = addedFiles.map(file => ({
+      path: file.path,
+      cid: `ipfs://${file.cid}`,
+    }));
+
+    // Pin each file individually
+    for (const file of addedFiles) {
+      await ipfs.pin.add(file.cid);
+    }
+
+    // Assume the last added is the root directory or most recent file
+    const rootDir = addedFiles[addedFiles.length - 1];
+    if (!rootDir || !rootDir.cid) {
+      throw new Error("Root directory or file CID not found in added files");
+    }
+
+    // Return all files' data, including root directory CID
+    return {
+      rootDirCid: rootDir.cid,
+      files: crAddedFiles,
+    };
+    // return rootDir;
+  } catch (error) {
+    console.error("Error in uploadBulkCollectionToIPFS:", error.message);
+    throw error;
+  }
+};
+
+const uploadMetadataToIPFS = async (metadataArray) => {
+  try {
+    const dir = `${TEMP_PATH}metadata`;
+
+    // Ensure directory exists
+    if (!fs.existsSync(dir)) fs.mkdirSync(dir);
+    // Write metadata files
+    metadataArray.forEach((metadata, index) => {
+      const filePath = `${dir}/metadata_${index + 1}.json`;
+      fs.writeFileSync(filePath, JSON.stringify(metadata));
+    });
+
+    // Upload the directory to IPFS
+    const metadataUri = await uploadBulkCollectionToIPFS(dir);
+
+    // Clean up the temporary directory
+    // fs.rmSync(dir, { recursive: true });
+    return metadataUri;
+  } catch (error) {
+    console.error("Error in uploadMetadataToIPFS:", error);
+    throw error;
+  }
+};
+
 const newCollection = async (ownerAddress, name, description, file) => {
-  const ownerEVMAddress = await getEVMAddress (ownerAddress);
-  let col = {
-    name: name || '',
-    description: description || '',
-    owner: ownerEVMAddress,
-    created: new Date ().getTime (),
-    image: '',
-    traits: {}
-  }
+  const ownerEVMAddress = await getEVMAddress(ownerAddress);
+  let col = {
+    name: name || "",
+    description: description || "",
+    owner: ownerEVMAddress,
+    created: new Date().getTime(),
+    image: "",
+    traits: {},
+  };

-  try {
-    const logoPromise = generateLogo (file.buffer);
-    const thumbnailPromise = generateThumbnail (file.buffer);
+  try {
+    const logoPromise = generateLogo(file.buffer);
+    const thumbnailPromise = generateThumbnail(file.buffer);

-    const [logo, thumbnail] = await Promise.all ([logoPromise, thumbnailPromise]);
+    const [logo, thumbnail] = await Promise.all([
+      logoPromise,
+      thumbnailPromise,
+    ]);
+    const [logoHash, thumbnailHash] = await Promise.all([
+      uploadToIPFS(logo),
+      uploadToIPFS(thumbnail),
+    ]);

-    const [logoHash, thumbnailHash] = await Promise.all ([uploadToIPFS (logo), uploadToIPFS (thumbnail)]);
-    col.image = `ipfs://${logoHash}`;
-    col.thumbnail = `ipfs://${thumbnailHash}`;
-    return await firebase.collection ('collections').add (col);
-  } catch (err) {
-    throw err;
-  }
-}
+    col.image = `ipfs://${logoHash}`;
+    col.thumbnail = `ipfs://${thumbnailHash}`;
+    return await firebase.collection("collections").add(col);
+  } catch (err) {
+    throw err;
+  }
+};

-module.exports = { newCollection, generateLogo, generateThumbnail, uploadToIPFS };
+module.exports = {
+  newCollection,
+  generateLogo,
+  generateThumbnail,
+  uploadToIPFS,
+  uploadBulkToIPFS,
+  uploadMetadataToIPFS,
+};
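Note for reviewers: a hedged usage sketch of the new uploadMetadataToIPFS export. The caller, the metadata shape, and the ipfs:// image URIs are assumptions for illustration; the route handler that would invoke this is not part of the diff.

// Hypothetical caller: uploadMetadataToIPFS writes metadata_N.json files under
// TEMP_PATH and uploads them via uploadBulkCollectionToIPFS.
const { uploadMetadataToIPFS } = require("./lib/collection");

const metadataArray = [
  { name: "Item #1", description: "First item", image: "ipfs://<cid>/1.png" },
  { name: "Item #2", description: "Second item", image: "ipfs://<cid>/2.png" },
];

uploadMetadataToIPFS(metadataArray).then(({ rootDirCid, files }) => {
  // rootDirCid is the CID of the last file added (see the note below);
  // files is [{ path: "metadata_1.json", cid: "ipfs://..." }, ...]
  console.log(rootDirCid, files);
});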
Loading
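One design note: uploadBulkCollectionToIPFS adds files one at a time and treats the last entry as the "root directory", so rootDirCid is really the CID of the final file, not of a directory. The unused globSource import hints at the alternative: add the directory in a single call so IPFS returns a true directory CID. A sketch under that assumption (API names per ipfs-http-client v56+, where addAll and globSource(cwd, pattern) exist):

const { globSource } = require("ipfs-http-client");

// Sketch only: upload a whole directory in one call and capture the wrapping
// directory's CID. `ipfs` is an already-created ipfs-http-client instance.
const uploadDirToIPFS = async (ipfs, dirPath) => {
  let root;
  for await (const entry of ipfs.addAll(globSource(dirPath, "**/*"), {
    wrapWithDirectory: true,
  })) {
    root = entry; // the last emitted entry is the wrapping directory
  }
  return root.cid.toString(); // files then resolve as ipfs://<rootCid>/<name>
};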