Mirror of https://github.com/filecoin-project/slate.git (synced 2024-11-23 14:07:20 +03:00)
Merge pull request #957 from filecoin-project/@martinalong/estuary

Estuary for storage deals

Commit 8b2f28c764
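To orient the reader, a minimal sketch of the flow this PR wires up: push a CID to Estuary's /content/add-ipfs endpoint with the new ESTUARY_API_KEY, then record it in the new deals table. The endpoint, headers, request body, and table come from the diff below; the helper name pinAndRecord and the early return on a failed response are assumptions, not the committed worker code (which appears at the end of the diff).

import "isomorphic-fetch";

// Illustrative only: pin one CID on Estuary, then record it in the new "deals" table.
const pinAndRecord = async (DB, cid) => {
  const res = await fetch("https://api.estuary.tech/content/add-ipfs", {
    method: "POST",
    headers: {
      Authorization: `Bearer ${process.env.ESTUARY_API_KEY}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ name: "", root: cid }),
  });
  if (!res.ok) return null;

  const json = await res.json();
  // The new deals schema only requires a cid; createdAt defaults to now().
  await DB.insert([{ cid }]).into("deals");
  return json;
};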
@@ -62,6 +62,10 @@ export class Boundary extends React.PureComponent {
       return;
     }

+    if (e.target instanceof SVGElement) {
+      return;
+    }
+
     if (
       this.props.isDataMenuCaptured &&
       typeof e.target.hasAttribute === "function" &&
@@ -78,7 +78,7 @@ import getActivity from "~/node_common/data/methods/get-activity";
 import getExplore from "~/node_common/data/methods/get-explore";

 // NOTE(jim):
-// Search postgres queries
+// Summary postgres queries
 import getEverySlate from "~/node_common/data/methods/get-every-slate";
 import getEveryUser from "~/node_common/data/methods/get-every-user";
 import getEveryFile from "~/node_common/data/methods/get-every-file";
@@ -98,6 +98,10 @@ import pruneVerifications from "~/node_common/data/methods/prune-verifications";
 import createOrphan from "~/node_common/data/methods/create-orphan";
 import getAllSendgridContacts from "~/node_common/data/methods/get-all-sendgrid-contacts";

+// NOTE(martina):
+// Deals
+import createDeal from "~/node_common/data/methods/create-deal";
+
 export {
   // NOTE(jim): One-offs
   createOrphan,
@@ -112,7 +116,7 @@ export {
   getUserByTwitterId,
   recalcUserSlatecount,
   recalcUserFollowercount,
-  //NOTE(martina): File operations
+  // NOTE(martina): File operations
   createFile,
   getFileByCid,
   getFileByUrl,
@@ -161,11 +165,11 @@ export {
   createActivity,
   getActivity,
   getExplore,
-  // NOTE(jim): Search
+  // NOTE(jim): Summary
   getEverySlate,
   getEveryUser,
   getEveryFile,
-  //NOTE(toast): Verification operations
+  // NOTE(toast): Verification operations
   createVerification,
   getVerificationByEmail,
   getVerificationBySid,
@@ -177,4 +181,6 @@ export {
   createTwitterToken,
   getTwitterToken,
   updateTwitterToken,
+  // NOTE(martina): Deals
+  createDeal,
 };
node_common/data/methods/create-deal.js (new file, 22 lines)
@@ -0,0 +1,22 @@
import { runQuery } from "~/node_common/data/utilities";

export default async ({ cids }) => {
  return await runQuery({
    label: "CREATE_DEAL",
    queryFn: async (DB) => {
      let query = await DB.insert(cids).into("deals").returning("*");

      if (!query || query.error) {
        return [];
      }

      return JSON.parse(JSON.stringify(query));
    },
    errorFn: async (e) => {
      return {
        error: true,
        decorator: "CREATE_DEAL",
      };
    },
  });
};
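For reference, a minimal sketch of how this new method might be invoked through the data layer; the Data namespace import and the example CID are assumptions for illustration rather than part of this diff.

import * as Data from "~/node_common/data";

// Hypothetical call site: record CIDs that were successfully pushed to Estuary.
// Each row only needs a cid; createdAt defaults to now() in the new deals schema.
const deals = await Data.createDeal({
  cids: [{ cid: "bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi" }],
});
console.log(deals);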
@@ -54,3 +54,6 @@ export const IFRAMELY_API_KEY = process.env.IFRAMELY_API_KEY;

 //NOTE(martina): Microlink
 export const MICROLINK_API_KEY = process.env.MICROLINK_API_KEY;
+
+//NOTE(martina): Estuary
+export const ESTUARY_API_KEY = process.env.ESTUARY_API_KEY;
@@ -28,7 +28,8 @@
     "www-setup-database": "NODE_TLS_REJECT_UNAUTHORIZED=0 node ./scripts setup-database",
     "www-seed-database": "NODE_TLS_REJECT_UNAUTHORIZED=0 node ./scripts seed-database",
     "www-adjust-database": "NODE_TLS_REJECT_UNAUTHORIZED=0 node ./scripts adjust",
-    "www-migrate-database": "NODE_TLS_REJECT_UNAUTHORIZED=0 node ./scripts flattening-migration"
+    "www-migrate-database": "NODE_TLS_REJECT_UNAUTHORIZED=0 node ./scripts flattening-migration",
+    "www-storage-deals": "NODE_TLS_REJECT_UNAUTHORIZED=0 node ./scripts worker-storage-deals"
   },
   "repository": "filecoin-project/slate",
   "dependencies": {
@@ -11,27 +11,18 @@ const db = knex(envConfig);

 Logging.log(`RUNNING: adjust.js`);

-const renameExistingColumn = db.schema.table("users", function (table) {
-  table.renameColumn("textileToken", "textileKey");
-});
-
-const addNewColumns = db.schema.table("users", function (table) {
-  table.string("textileToken").nullable();
-  table.string("textileThreadID").nullable();
-  table.string("textileBucketCID").nullable();
-});
-
-const editColumnLength = db.schema.table("users", function (table) {
-  table.string("textileToken", 400).nullable().alter();
-});
-
-const addNewFieldsSlates = db.schema.table("slates", function (table) {
-  table.string("name").nullable();
-  table.string("body").nullable();
-  table.string("preview").nullable();
-});
-
-Promise.all([editColumnLength]);
+const renameDealsTable = db.schema.renameTable("deals", "old_deals");
+
+const deleteGlobalTable = db.schema.dropTable("global");
+
+const deleteStatsTable = db.schema.dropTable("stats");
+
+const createDealsTable = db.schema.createTable("deals", function (table) {
+  table.string("cid").primary().unique().notNullable();
+  table.timestamp("createdAt").notNullable().defaultTo(db.raw("now()"));
+});
+
+Promise.all([renameDealsTable]);

 Logging.log(`FINISHED: adjust.js`);
 Logging.log(` CTRL +C to return to terminal.`);
@@ -111,6 +111,15 @@ const printFilesTable = async () => {

 /* Add columns (including tags) */

+const addUserTextileColumns = async () => [
+  await DB.schema.table("users", function (table) {
+    table.string("textileKey").nullable();
+    table.string("textileToken", 400).nullable();
+    table.string("textileThreadID").nullable();
+    table.string("textileBucketCID").nullable();
+  }),
+];
+
 const addUserColumns = async () => {
   await DB.schema.table("users", function (table) {
     table.string("body", 2000).nullable();
@@ -118,10 +127,10 @@ const addUserColumns = async () => {
     table.string("name").nullable();
     table.string("twitterUsername").nullable();
     table.boolean("twitterVerified").notNullable().defaultTo(false);
-    table.string("textileKey").nullable();
-    table.string("textileToken", 400).nullable();
-    table.string("textileThreadID").nullable();
-    table.string("textileBucketCID").nullable();
+    // table.string("textileKey").nullable();
+    // table.string("textileToken", 400).nullable();
+    // table.string("textileThreadID").nullable();
+    // table.string("textileBucketCID").nullable();
     table.boolean("settingsDealsAutoApprove").notNullable().defaultTo(false);
     table.boolean("allowAutomaticDataStorage").notNullable().defaultTo(true);
     table.boolean("allowEncryptedDataStorage").notNullable().defaultTo(true);
@@ -188,7 +197,7 @@ const migrateUserTable = async () => {
       name: data.name,
       body: data.body,
       photo: data.photo,
-      textileKey: data.tokens?.api,
+      // textileKey: data.tokens?.api,
       settingsDealsAutoApprove: data.settings?.settings_deals_auto_approve,
       allowAutomaticDataStorage: data.settings?.allow_automatic_data_storage,
       allowEncryptedDataStorage: data.settings?.allow_encrypted_data_storage,
@@ -317,6 +326,7 @@ const runScript = async () => {
   // await printSlatesTable();
   // await printFilesTable();

+  await addUserTextileColumns();
   // await addUserColumns();
   // await addSlateColumns();
   // await addFileColumns();
@@ -18,11 +18,8 @@ Logging.log(`RUNNING: seed-database.js`);
 //replace createdat, updatedat, ownerid, owneruserid

 const createDealsTable = db.schema.createTable("deals", function (table) {
-  table.uuid("id").primary().unique().notNullable().defaultTo(db.raw("uuid_generate_v4()"));
-  table.string("ownerId").nullable();
-  table.jsonb("data").nullable();
+  table.string("cid").primary().unique().notNullable();
   table.timestamp("createdAt").notNullable().defaultTo(db.raw("now()"));
-  table.timestamp("updatedAt").notNullable().defaultTo(db.raw("now()"));
 });

 const createUsersTable = db.schema.createTable("users", function (table) {
scripts/worker-storage-deals.js (new file, 226 lines)
@@ -0,0 +1,226 @@
import "isomorphic-fetch";

import * as Environment from "~/node_common/environment";
import * as Constants from "~/node_common/constants";
import * as Strings from "~/common/strings";
import * as Logging from "~/common/logging";

import configs from "~/knexfile";
import knex from "knex";

import { Buckets, PrivateKey, Filecoin, Client, ThreadID } from "@textile/hub";

const envConfig = configs["development"];

const DB = knex(envConfig);

const TEXTILE_KEY_INFO = {
  key: Environment.TEXTILE_HUB_KEY,
  secret: Environment.TEXTILE_HUB_SECRET,
};
const recordTextileBucketInfoProduction = async (props) => {
  // Backfill textile columns for users that have not been migrated yet.
  const users = await DB.select("id", "data").from("users").where({ textileToken: null });
  let userUpdates = [];
  let i = 0;
  for (let user of users) {
    // Flush accumulated updates in batches of 50.
    if (i % 50 === 0) {
      console.log(i);
      if (userUpdates.length) {
        await pushUserUpdates(userUpdates);
        userUpdates = [];
      }
    }
    i += 1;
    try {
      const textileKey = user.data?.tokens?.api;
      if (!textileKey) {
        console.log(`ERROR: user ${user.id} does not have textile key`);
        continue; // skip users without a key
      }
      let buckets = await Buckets.withKeyInfo(TEXTILE_KEY_INFO);

      // Authenticate with the user's key, then resolve their thread and bucket root.
      const identity = PrivateKey.fromString(textileKey);
      const textileToken = await buckets.getToken(identity);
      buckets.context.withToken(textileToken);

      const client = new Client(buckets.context);
      const res = await client.getThread("buckets");
      const textileThreadID =
        typeof res.id === "string" ? res.id : ThreadID.fromBytes(res.id).toString();
      buckets.context.withThread(textileThreadID);

      const roots = await buckets.list();
      const existing = roots.find((bucket) => bucket.name === Constants.textile.mainBucket);

      let ipfs = existing.path;
      const textileBucketCID = Strings.ipfsToCid(ipfs);

      if (!textileToken || !textileThreadID || !textileBucketCID) {
        console.log(`ERROR ${user.id} missing some value`);
        continue;
      }
      // console.log({ textileToken, textileThreadID, textileBucketCID });
      userUpdates.push({
        id: user.id,
        textileKey,
        textileToken,
        textileThreadID,
        textileBucketCID,
      });
    } catch (e) {
      console.log(e);
    }
  }

  // Flush the remaining partial batch.
  if (userUpdates.length) {
    await pushUserUpdates(userUpdates);
  }

  console.log("SCRIPT FINISHED");
};
const pushUserUpdates = async (userUpdates) => {
  let query = userUpdates.map((user) => "(?::uuid, ?, ?, ?, ?)").join(", ");
  let values = [];
  for (let user of userUpdates) {
    values.push(
      user.id,
      user.textileKey,
      user.textileToken,
      user.textileThreadID,
      user.textileBucketCID
    );
  }

  await DB.raw(
    `UPDATE ?? as u SET ?? = ??, ?? = ??, ?? = ??, ?? = ?? from (values ${query}) as c(??, ??, ??, ??, ??) WHERE ?? = ??`,
    [
      "users",
      "textileKey",
      "c.textileKey",
      "textileToken",
      "c.textileToken",
      "textileThreadID",
      "c.textileThreadID",
      "textileBucketCID",
      "c.textileBucketCID",
      ...values,
      "id",
      "textileKey",
      "textileToken",
      "textileThreadID",
      "textileBucketCID",
      "c.id",
      "u.id",
    ]
  );
};
const recordTextileBucketInfo = async (props) => {
  const users = await DB.select("users.id", "users.textileKey")
    .from("users")
    .where({ "users.textileToken": null });
  const userUpdates = [];
  for (let user of users) {
    try {
      let buckets = await Buckets.withKeyInfo(TEXTILE_KEY_INFO);

      const identity = PrivateKey.fromString(user.textileKey);
      const textileToken = await buckets.getToken(identity);
      buckets.context.withToken(textileToken);

      const client = new Client(buckets.context);
      const res = await client.getThread("buckets");
      const textileThreadID =
        typeof res.id === "string" ? res.id : ThreadID.fromBytes(res.id).toString();
      buckets.context.withThread(textileThreadID);

      const roots = await buckets.list();
      const existing = roots.find((bucket) => bucket.name === Constants.textile.mainBucket);

      let ipfs = existing.path;
      const textileBucketCID = Strings.ipfsToCid(ipfs);

      if (!textileToken || !textileThreadID || !textileBucketCID) {
        console.log("ERROR missing some value");
        continue;
      }
      // console.log({ textileToken, textileThreadID, textileBucketCID });
      userUpdates.push({ id: user.id, textileToken, textileThreadID, textileBucketCID });
    } catch (e) {
      console.log(e);
    }
  }
  let query = userUpdates.map((user) => "(?::uuid, ?, ?, ?)").join(", ");
  let values = [];
  for (let user of userUpdates) {
    values.push(user.id, user.textileToken, user.textileThreadID, user.textileBucketCID);
  }

  await DB.raw(
    `UPDATE ?? as u SET ?? = ??, ?? = ??, ?? = ?? from (values ${query}) as c(??, ??, ??, ??) WHERE ?? = ??`,
    [
      "users",
      "textileToken",
      "c.textileToken",
      "textileThreadID",
      "c.textileThreadID",
      "textileBucketCID",
      "c.textileBucketCID",
      ...values,
      "id",
      "textileToken",
      "textileThreadID",
      "textileBucketCID",
      "c.id",
      "u.id",
    ]
  );
  console.log("SCRIPT FINISHED");
};
const run = async (props) => {
  const successful = [];
  // Bucket CIDs for every user that owns at least one non-link file.
  const users = await DB.select("users.textileBucketCID")
    .from("users")
    .whereExists(function () {
      this.select("id")
        .from("files")
        .where("users.id", "=", "files.ownerId")
        .where("files.isLink", false);
    });
  for (let user of users) {
    // Pin each bucket root on Estuary and keep the CIDs that were accepted.
    let json = await addToEstuary(user.textileBucketCID);
    if (json) {
      successful.push({ cid: user.textileBucketCID });
    }
  }
  // Record the pinned CIDs in the deals table.
  let query = await DB.insert(successful).into("deals").returning("*");
};
const addToEstuary = async (cid) => {
  try {
    let res = await fetch("https://api.estuary.tech/content/add-ipfs", {
      method: "POST",
      headers: {
        Authorization: `Bearer ${Environment.ESTUARY_API_KEY}`,
        "Content-Type": "application/json",
      },
      body: JSON.stringify({
        name: "",
        root: cid,
      }),
    });
    let json = await res.json();
    console.log(json);
    return json;
  } catch (e) {
    Logging.error({
      error: e,
      decorator: "ADD_CID_TO_ESTUARY",
    });
  }
  console.log("SCRIPT FINISHED");
};
recordTextileBucketInfoProduction();
// recordTextileBucketInfo();
// run();
// addToEstuary();
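For readers unfamiliar with the knex raw bindings used by pushUserUpdates in the worker above: `??` interpolates an escaped identifier and `?` a value binding, so for a two-row batch the five-column statement expands roughly as sketched below. The sample rows and the rendered SQL are illustrative, not output captured from the worker.

// Hypothetical two-row batch passed to pushUserUpdates.
const exampleUpdates = [
  { id: "11111111-1111-1111-1111-111111111111", textileKey: "k1", textileToken: "t1", textileThreadID: "th1", textileBucketCID: "b1" },
  { id: "22222222-2222-2222-2222-222222222222", textileKey: "k2", textileToken: "t2", textileThreadID: "th2", textileBucketCID: "b2" },
];
// await pushUserUpdates(exampleUpdates);

// `query` becomes "(?::uuid, ?, ?, ?, ?), (?::uuid, ?, ?, ?, ?)" and knex builds roughly:
//
//   UPDATE "users" as u
//   SET "textileKey" = "c"."textileKey",
//       "textileToken" = "c"."textileToken",
//       "textileThreadID" = "c"."textileThreadID",
//       "textileBucketCID" = "c"."textileBucketCID"
//   from (values (<id1>::uuid, 'k1', 't1', 'th1', 'b1'), (<id2>::uuid, 'k2', 't2', 'th2', 'b2'))
//     as c("id", "textileKey", "textileToken", "textileThreadID", "textileBucketCID")
//   WHERE "c"."id" = "u"."id"
//
// i.e. one UPDATE ... FROM (VALUES ...) that applies the whole batch in a single round trip.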