From 0c91a54382916dc79a7f15442ee9a2489761a711 Mon Sep 17 00:00:00 2001 From: Martina Date: Fri, 24 Sep 2021 18:20:28 -0700 Subject: [PATCH] textile bucketsGetOrCreate replacement --- common/constants.js | 6 + common/file-utilities.js | 5 +- common/strings.js | 4 + common/upload-utilities.js | 20 +- components/core/Application.js | 2 +- node_common/constants.js | 6 + node_common/data/methods/update-user-by-id.js | 2 + node_common/managers/viewer.js | 27 +-- node_common/serializers.js | 3 + node_common/utilities.js | 173 ++++++++++++++---- pages/api/data/archive.js | 14 +- pages/api/data/bucket-remove.js | 2 +- pages/api/data/delete.js | 9 +- pages/api/data/get-bucket.js | 4 +- pages/api/data/save-copy.js | 4 +- pages/api/twitter/signup-with-verification.js | 26 +-- pages/api/twitter/signup.js | 26 ++- pages/api/users/create.js | 26 +-- pages/api/users/delete.js | 4 +- pages/api/users/update.js | 7 +- pages/api/zip/get-paths.js | 4 +- scenes/SceneMakeFilecoinDeal.js | 8 +- scenes/SceneSettingsDeveloper.js | 9 +- scripts/adjust.js | 27 +-- scripts/flattening-migration.js | 9 +- scripts/repost-migration.js | 4 +- scripts/seed-database.js | 5 +- scripts/worker-heavy-stones.js | 2 +- 28 files changed, 259 insertions(+), 179 deletions(-) diff --git a/common/constants.js b/common/constants.js index 101e5ffd..172e9ecd 100644 --- a/common/constants.js +++ b/common/constants.js @@ -226,3 +226,9 @@ export const grids = { export const profileDefaultPicture = "https://slate.textile.io/ipfs/bafkreick3nscgixwfpq736forz7kzxvvhuej6kszevpsgmcubyhsx2pf7i"; + +export const textile = { + threadName: "buckets", + mainBucket: "data", + dealsBucket: "stage-deal", +}; diff --git a/common/file-utilities.js b/common/file-utilities.js index 1c85d15d..2c41a2c3 100644 --- a/common/file-utilities.js +++ b/common/file-utilities.js @@ -5,12 +5,11 @@ import * as Validations from "~/common/validations"; import * as Events from "~/common/custom-events"; import * as Logging from "~/common/logging"; import * as Environment from "~/common/environment"; +import * as Constants from "~/common/constants"; import { encode, isBlurhashValid } from "blurhash"; import { v4 as uuid } from "uuid"; -const STAGING_DEAL_BUCKET = "stage-deal"; - export const fileKey = ({ lastModified, name }) => `${lastModified}-${name}`; const loadImage = async (src) => @@ -138,7 +137,7 @@ export const upload = async ({ file, onProgress, bucketName, uploadAbort }) => { let res; if (isZipFile && isUnityFile) { res = await _privateUploadMethod(`${zipUploadRoute}${file.name}`, file); - } else if (bucketName && bucketName === STAGING_DEAL_BUCKET) { + } else if (bucketName && bucketName === Constants.textile.dealsBucket) { res = await _privateUploadMethod(`${storageDealRoute}${file.name}`, file); } else { res = await _privateUploadMethod(`${generalRoute}${file.name}`, file); diff --git a/common/strings.js b/common/strings.js index 6cf63e5f..f0531450 100644 --- a/common/strings.js +++ b/common/strings.js @@ -208,6 +208,10 @@ export const getRemainingTime = (seconds) => { return `${value} ${unit} remaining`; }; +export const ipfsToCid = (ipfs) => { + return ipfs.replace("/ipfs/", ""); +}; + export const urlToCid = (url) => { return url .replace(`${Constants.gateways.ipfs}/`, "") diff --git a/common/upload-utilities.js b/common/upload-utilities.js index 33890538..7e4b8a06 100644 --- a/common/upload-utilities.js +++ b/common/upload-utilities.js @@ -21,15 +21,14 @@ let UploadAbort = { // NOTE(amine): queue utilities const getUploadQueue = () => UploadStore.queue; 
-const pushToUploadQueue = ({ file, slate, bucketName }) => - UploadStore.queue.push({ file, slate, bucketName }); +const pushToUploadQueue = ({ file, slate }) => UploadStore.queue.push({ file, slate }); const resetUploadQueue = () => (UploadStore.queue = []); const removeFromUploadQueue = ({ fileKey }) => (UploadStore.queue = UploadStore.queue.filter(({ file }) => getFileKey(file) !== fileKey)); // NOTE(amine): failedFilesCache utilities -const storeFileInCache = ({ file, slate, bucketName }) => - (UploadStore.failedFilesCache[getFileKey(file)] = { file, slate, bucketName }); +const storeFileInCache = ({ file, slate }) => + (UploadStore.failedFilesCache[getFileKey(file)] = { file, slate }); const removeFileFromCache = ({ fileKey }) => delete UploadStore.failedFilesCache[fileKey]; const getFileFromCache = ({ fileKey }) => UploadStore.failedFilesCache[fileKey] || {}; @@ -56,7 +55,7 @@ export function createUploadProvider({ const uploadQueue = getUploadQueue(); if (UploadStore.isUploading || uploadQueue.length === 0) return; - const { file, slate, bucketName } = getUploadQueue().shift() || {}; + const { file, slate } = getUploadQueue().shift() || {}; const fileKey = getFileKey(file); @@ -88,7 +87,6 @@ export function createUploadProvider({ } else { const response = await FileUtilities.upload({ file, - bucketName, uploadAbort: UploadAbort, onProgress: (e) => onProgress({ fileKey, loaded: e.loaded }), }); @@ -111,7 +109,7 @@ export function createUploadProvider({ } } } catch (e) { - storeFileInCache({ file, slate, bucketName }); + storeFileInCache({ file, slate }); if (onError) onError({ fileKey }); Logging.error(e); @@ -129,7 +127,7 @@ export function createUploadProvider({ if (onFinish) onFinish(); }; - const addToUploadQueue = ({ files, slate, bucketName }) => { + const addToUploadQueue = ({ files, slate }) => { if (!files || !files.length) return; for (let i = 0; i < files.length; i++) { @@ -145,7 +143,7 @@ export function createUploadProvider({ if (fileKey in UploadStore.failedFilesCache) removeFileFromCache({ fileKey }); if (onAddedToQueue) onAddedToQueue(files[i]); - pushToUploadQueue({ file: files[i], slate, bucketName }); + pushToUploadQueue({ file: files[i], slate }); } const isQueueEmpty = getUploadQueue().length === 0; @@ -156,12 +154,12 @@ export function createUploadProvider({ }; const retry = ({ fileKey }) => { - const { file, slate, bucketName } = getFileFromCache({ fileKey }); + const { file, slate } = getFileFromCache({ fileKey }); if (file.type === "link") { addLinkToUploadQueue({ url: file.name, slate }); return; } - addToUploadQueue({ files: [file], slate, bucketName }); + addToUploadQueue({ files: [file], slate }); }; const cancel = ({ fileKey }) => { diff --git a/components/core/Application.js b/components/core/Application.js index a2c96068..775240d7 100644 --- a/components/core/Application.js +++ b/components/core/Application.js @@ -424,6 +424,7 @@ export default class ApplicationPage extends React.Component { }; render() { + // console.log(this.state.viewer); let page = this.state.page; if (!page?.id) { page = NavigationData.getById(null, this.state.viewer); @@ -457,7 +458,6 @@ export default class ApplicationPage extends React.Component { isMobile: this.state.isMobile, isMac: this.props.isMac, activeUsers: this.state.activeUsers, - userBucketCID: this.state.userBucketCID, external: !!!this.state.viewer, }); diff --git a/node_common/constants.js b/node_common/constants.js index 17a76120..f5ff8cbf 100644 --- a/node_common/constants.js +++ b/node_common/constants.js @@ 
-17,3 +17,9 @@ export const MIN_ARCHIVE_SIZE_BYTES = 104857600; // NOTE(amine): 15 minutes export const TOKEN_EXPIRATION_TIME = 2 * 60 * 60 * 1000; + +export const textile = { + threadName: "buckets", + mainBucket: "data", + dealsBucket: "stage-deal", +}; diff --git a/node_common/data/methods/update-user-by-id.js b/node_common/data/methods/update-user-by-id.js index cfc79238..3ea1d0b6 100644 --- a/node_common/data/methods/update-user-by-id.js +++ b/node_common/data/methods/update-user-by-id.js @@ -3,10 +3,12 @@ import * as Serializers from "~/node_common/serializers"; import { runQuery } from "~/node_common/data/utilities"; export default async (user) => { + console.log("inside update user by id"); return await runQuery({ label: "UPDATE_USER_BY_ID", queryFn: async (DB) => { const query = await DB.from("users").where("id", user.id).update(user).returning("*"); + console.log({ query }); const index = query ? query.pop() : null; return JSON.parse(JSON.stringify(index)); diff --git a/node_common/managers/viewer.js b/node_common/managers/viewer.js index c223fedb..7c8ef30a 100644 --- a/node_common/managers/viewer.js +++ b/node_common/managers/viewer.js @@ -12,8 +12,6 @@ import * as Logging from "~/common/logging"; import WebSocket from "ws"; -const STAGING_DEAL_BUCKET = "stage-deal"; - const websocketSend = async (type, data) => { if (Strings.isEmpty(Environment.PUBSUB_SECRET)) { return; @@ -143,18 +141,20 @@ export const getById = async ({ id }) => { return null; } + delete user.password; + delete user.salt; + Data.createUsageStat({ id }); //NOTE(martina): to record the person's usage of Slate for analytics // user.library = await Data.getFilesByUserId({ id }); - const [slates, keys, subscriptions, following, followers, { bucketRoot }] = ( + const [slates, keys, subscriptions, following, followers] = ( await Promise.allSettled([ Data.getSlatesByUserId({ ownerId: id, includeFiles: true }), Data.getAPIKeysByUserId({ userId: id }), Data.getSubscriptionsByUserId({ ownerId: id }), Data.getFollowingByUserId({ ownerId: id }), Data.getFollowersByUserId({ userId: id }), - Utilities.getBucketAPIFromUserToken({ user }), ]) ).map((item) => item.value); @@ -217,7 +217,7 @@ export const getById = async ({ id }) => { pdfBytes, }, // tags, - userBucketCID: bucketRoot?.path || null, + // userBucketCID: bucketRoot?.path || null, keys, slates, subscriptions, @@ -245,9 +245,7 @@ export const getDealHistory = async ({ id }) => { let deals = []; try { - const FilecoinSingleton = await Utilities.getFilecoinAPIFromUserToken({ - user, - }); + const FilecoinSingleton = await Utilities.getBucket({ user }); const { filecoin } = FilecoinSingleton; const records = await filecoin.storageDealRecords({ @@ -312,10 +310,9 @@ export const getTextileById = async ({ id }) => { } // NOTE(jim): This bucket is purely for staging data for other deals. 
- const stagingData = await Utilities.getBucketAPIFromUserToken({ + const stagingData = await Utilities.getBucket({ user, - bucketName: STAGING_DEAL_BUCKET, - encrypted: false, + bucketName: Constants.textile.dealsBucket, }); const FilecoinSingleton = await Utilities.getFilecoinAPIFromUserToken({ @@ -361,7 +358,7 @@ export const getTextileById = async ({ id }) => { } let items = null; - const dealBucket = r.find((bucket) => bucket.name === STAGING_DEAL_BUCKET); + const dealBucket = r.find((bucket) => bucket.name === Constants.textile.dealsBucket); try { const path = await stagingData.buckets.listPath(dealBucket.key, "/"); items = path.item.items; @@ -375,11 +372,7 @@ export const getTextileById = async ({ id }) => { }); } - const b = await Utilities.getBucketAPIFromUserToken({ - user, - bucketName: "data", - encrypted: false, - }); + const b = await Utilities.getBucket({ user }); const settings = await b.buckets.defaultArchiveConfig(b.bucketKey); diff --git a/node_common/serializers.js b/node_common/serializers.js index 58d36dd7..fe30c5a1 100644 --- a/node_common/serializers.js +++ b/node_common/serializers.js @@ -18,7 +18,10 @@ // name: user.name, // twitterUsername: user.twitterUsername, // twitterVerified: user.twitterVerified, +// textileKey: user.textileKey, // textileToken: user.textileToken, +// textileThreadID: user.textileThreadID, +// textileBucketCID: user.textileBucketCID, // settingsDealAutoApprove: user.settingsDealAutoApprove, // allowAutomaticDataStorage: user.allowAutomaticDataStorage, // allowEncryptedDataStorage: user.allowEncryptedDataStorage, diff --git a/node_common/utilities.js b/node_common/utilities.js index fcb64c03..b8defea8 100644 --- a/node_common/utilities.js +++ b/node_common/utilities.js @@ -18,8 +18,6 @@ const ENCRYPTION_IV = crypto.randomBytes(16); import { Buckets, PrivateKey, Filecoin, Client, ThreadID } from "@textile/hub"; -const BUCKET_NAME = "data"; - const TEXTILE_KEY_INFO = { key: Environment.TEXTILE_HUB_KEY, secret: Environment.TEXTILE_HUB_SECRET, @@ -117,8 +115,8 @@ export const parseAuthHeader = (value) => { }; export const getFilecoinAPIFromUserToken = async ({ user }) => { - const token = user.textileToken; - const identity = await PrivateKey.fromString(token); + const textileKey = user.textileKey; + const identity = await PrivateKey.fromString(textileKey); const filecoin = await Filecoin.withKeyInfo(TEXTILE_KEY_INFO); await filecoin.getToken(identity); @@ -161,43 +159,154 @@ export const addExistingCIDToData = async ({ buckets, key, path, cid }) => { }; // NOTE(jim): Requires @textile/hub -export const getBucketAPIFromUserToken = async ({ user, bucketName, encrypted = false }) => { - const token = user.textileToken; - const name = Strings.isEmpty(bucketName) ?
BUCKET_NAME : bucketName; - const identity = await PrivateKey.fromString(token); +// export const getBucketAPIFromUserToken = async ({ +// user, +// bucketName = Constants.textile.mainBucket, +// encrypted = false, +// }) => { +// const token = user.textileToken; +// const name = bucketName; +// const identity = await PrivateKey.fromString(token); +// let buckets = await Buckets.withKeyInfo(TEXTILE_KEY_INFO); + +// const textileToken = await buckets.getToken(identity); + +// let root = null; +// Logging.log(`buckets.getOrCreate() init ${name}`); +// try { +// Logging.log("before buckets get or create"); +// const created = await buckets.getOrCreate(name, { encrypted }); +// Logging.log("after buckets get or create"); +// root = created.root; +// } catch (e) { +// Logging.log(`buckets.getOrCreate() warning: ${e.message}`); +// Social.sendTextileSlackMessage({ +// file: "/node_common/utilities.js", +// user, +// message: e.message, +// code: e.code, +// functionName: `buckets.getOrCreate`, +// }); +// } + +// if (!root) { +// Logging.error(`buckets.getOrCreate() failed for ${name}`); +// return { buckets: null, bucketKey: null, bucketRoot: null }; +// } + +// Logging.log(`buckets.getOrCreate() success for ${name}`); +// return { +// buckets, +// bucketKey: root.key, +// bucketRoot: root, +// bucketName: name, +// }; +// }; + +//NOTE(martina): only use this upon creating a new user. This creates their bucket without checking for an existing bucket +export const createBucket = async ({ + bucketName = Constants.textile.mainBucket, + encrypted = false, +}) => { + try { + const identity = await PrivateKey.fromRandom(); + const textileKey = identity.toString(); + + let buckets = await Buckets.withKeyInfo(TEXTILE_KEY_INFO); + + const textileToken = await buckets.getToken(identity); + buckets.context.withToken(textileToken); + + const client = new Client(buckets.context); + const newId = ThreadID.fromRandom(); + await client.newDB(newId, Constants.textile.threadName); + const textileThreadID = newId.toString(); + buckets.context.withThread(textileThreadID); + + const created = await buckets.create(bucketName, { encrypted }); + let ipfs = created.root.path; + const textileBucketCID = Strings.ipfsToCid(ipfs); + + console.log({ + textileKey, + textileToken, + textileThreadID, + textileBucketCID, + buckets, + bucketKey: created.root.key, + bucketRoot: created.root, + bucketName, + }); + + return { + textileKey, + textileToken, + textileThreadID, + textileBucketCID, + buckets, + bucketKey: created.root.key, + bucketRoot: created.root, + bucketName, + }; + } catch (e) { + Logging.error(e?.message); + } +}; + +//NOTE(martina): only use this for existing users. 
This grabs their existing bucket and does not create a new one if it is missing +export const getBucket = async ({ user, bucketName = Constants.textile.mainBucket }) => { + let updateUser = false; + let { textileKey, textileToken, textileThreadID, textileBucketCID } = user; + + if (!textileKey) { + return await createBucket({ user, bucketName }); + } + let buckets = await Buckets.withKeyInfo(TEXTILE_KEY_INFO); - await buckets.getToken(identity); + if (!textileToken) { + const identity = PrivateKey.fromString(textileKey); + textileToken = await buckets.getToken(identity); + updateUser = true; + } + buckets.context.withToken(textileToken); - let root = null; - Logging.log(`buckets.getOrCreate() init ${name}`); - try { - Logging.log("before buckets get or create"); - const created = await buckets.getOrCreate(name, { encrypted }); - Logging.log("after buckets get or create"); - root = created.root; - } catch (e) { - Logging.log(`buckets.getOrCreate() warning: ${e.message}`); - Social.sendTextileSlackMessage({ - file: "/node_common/utilities.js", - user, - message: e.message, - code: e.code, - functionName: `buckets.getOrCreate`, - }); + if (!textileThreadID) { + const client = new Client(buckets.context); + const res = await client.getThread("buckets"); + textileThreadID = typeof res.id === "string" ? res.id : ThreadID.fromBytes(res.id).toString(); + updateUser = true; + } + buckets.context.withThread(textileThreadID); + + const roots = await buckets.list(); + const existing = roots.find((bucket) => bucket.name === bucketName); + + if (!existing) { + return { buckets: null, bucketKey: null, bucketRoot: null, bucketName }; } - if (!root) { - Logging.error(`buckets.getOrCreate() failed for ${name}`); - return { buckets: null, bucketKey: null, bucketRoot: null }; + if (!textileBucketCID) { + let ipfs = existing.path; + textileBucketCID = Strings.ipfsToCid(ipfs); + updateUser = true; + } + if (updateUser) { + Data.updateUserById({ id: user.id, textileToken, textileThreadID, textileBucketCID }); } - Logging.log(`buckets.getOrCreate() success for ${name}`); + console.log({ + buckets, + bucketKey: existing.key, + bucketRoot: existing, + bucketName, + }); + return { buckets, - bucketKey: root.key, - bucketRoot: root, - bucketName: name, + bucketKey: existing.key, + bucketRoot: existing, + bucketName, }; }; diff --git a/pages/api/data/archive.js b/pages/api/data/archive.js index 3e51ff7e..3d5b3227 100644 --- a/pages/api/data/archive.js +++ b/pages/api/data/archive.js @@ -1,4 +1,4 @@ -import * as Data from "~/node_common/data"; +import * as Constants from "~/node_common/constants"; import * as Utilities from "~/node_common/utilities"; import * as Social from "~/node_common/social"; import * as Strings from "~/common/strings"; @@ -8,19 +8,17 @@ import * as RequestUtilities from "~/node_common/request-utilities"; import { v4 as uuid } from "uuid"; import { MAX_BUCKET_COUNT, MIN_ARCHIVE_SIZE_BYTES } from "~/node_common/constants"; -const STAGING_DEAL_BUCKET = "stage-deal"; - export default async (req, res) => { const userInfo = await RequestUtilities.checkAuthorizationInternal(req, res); if (!userInfo) return; const { id, user } = userInfo; - let bucketName = null; + let bucketName = Constants.textile.dealsBucket; if (req.body.data && req.body.data.bucketName) { bucketName = req.body.data.bucketName; } - const { buckets, bucketKey, bucketRoot } = await Utilities.getBucketAPIFromUserToken({ + const { buckets, bucketKey } = await Utilities.getBucket({ user, bucketName, }); @@ -37,7 +35,7 @@ export default async (req, res) =>
{ let items = null; let bucketSizeBytes = 0; try { - const path = await buckets.listPath(bucketRoot.key, "/"); + const path = await buckets.listPath(bucketKey, "/"); items = path.item; bucketSizeBytes = path.item.size; } catch (e) { @@ -108,7 +106,7 @@ export default async (req, res) => { // NOTE(jim): Either encrypt the bucket or don't encrypt the bucket. let encryptThisDeal = false; - if (bucketName !== STAGING_DEAL_BUCKET && user.allowEncryptedDataStorage) { + if (bucketName !== Constants.textile.dealsBucket && user.allowEncryptedDataStorage) { encryptThisDeal = true; } @@ -116,7 +114,7 @@ export default async (req, res) => { encryptThisDeal = true; } - let key = bucketRoot.key; + let key = bucketKey; let encryptedBucketName = null; if (user.allowEncryptedDataStorage || req.body.data.forceEncryption) { encryptedBucketName = req.body.data.forceEncryption diff --git a/pages/api/data/bucket-remove.js b/pages/api/data/bucket-remove.js index 86b9e80b..f274eee3 100644 --- a/pages/api/data/bucket-remove.js +++ b/pages/api/data/bucket-remove.js @@ -13,7 +13,7 @@ export default async (req, res) => { return res.status(500).send({ decorator: "SERVER_BUCKET_REMOVE_NO_CID", error: true }); } - const { buckets, bucketKey } = await Utilities.getBucketAPIFromUserToken({ + const { buckets, bucketKey } = await Utilities.getBucket({ user, bucketName: req.body.data.bucketName, }); diff --git a/pages/api/data/delete.js b/pages/api/data/delete.js index 2d052ca9..90bdafc2 100644 --- a/pages/api/data/delete.js +++ b/pages/api/data/delete.js @@ -1,3 +1,4 @@ +import * as Constants from "~/node_common/constants"; import * as Data from "~/node_common/data"; import * as Utilities from "~/node_common/utilities"; import * as Arrays from "~/common/arrays"; @@ -7,8 +8,6 @@ import * as ViewerManager from "~/node_common/managers/viewer"; import * as SearchManager from "~/node_common/managers/search"; import * as RequestUtilities from "~/node_common/request-utilities"; -const DEFAULT_BUCKET_NAME = "data"; - export default async (req, res) => { const userInfo = await RequestUtilities.checkAuthorizationInternal(req, res); if (!userInfo) return; @@ -25,9 +24,7 @@ export default async (req, res) => { return res.status(400).send({ decorator: "SERVER_REMOVE_DATA_NO_IDS", error: true }); } - const { buckets, bucketKey } = await Utilities.getBucketAPIFromUserToken({ - user, - }); + const { buckets, bucketKey } = await Utilities.getBucket({ user }); if (!buckets) { return res.status(500).send({ @@ -58,7 +55,7 @@ export default async (req, res) => { let items = []; try { for (let i = 0; i < r.length; i++) { - if (r[i].name === DEFAULT_BUCKET_NAME) { + if (r[i].name === Constants.textile.mainBucket) { const next = await buckets.listPath(r[i].key, "/"); const set = next.item.items; items = [...set, ...items]; diff --git a/pages/api/data/get-bucket.js b/pages/api/data/get-bucket.js index 9ca4f5e9..78707f1b 100644 --- a/pages/api/data/get-bucket.js +++ b/pages/api/data/get-bucket.js @@ -9,9 +9,7 @@ export default async (req, res) => { if (!userInfo) return; const { id, user } = userInfo; - const { buckets, bucketKey } = await Utilities.getBucketAPIFromUserToken({ - user, - }); + const { buckets, bucketKey } = await Utilities.getBucket({ user }); if (!buckets) { return res.status(500).send({ decorator: "SERVER_NO_BUCKET_DATA", error: true }); diff --git a/pages/api/data/save-copy.js b/pages/api/data/save-copy.js index b64a9c53..a5c428cd 100644 --- a/pages/api/data/save-copy.js +++ b/pages/api/data/save-copy.js @@ -17,9 +17,7 @@ export
default async (req, res) => { let decorator = "SERVER_SAVE_COPY"; - let { buckets, bucketKey, bucketRoot } = await Utilities.getBucketAPIFromUserToken({ - user, - }); + let { buckets, bucketKey, bucketRoot } = await Utilities.getBucket({ user }); if (!buckets) { return res.status(500).send({ diff --git a/pages/api/twitter/signup-with-verification.js b/pages/api/twitter/signup-with-verification.js index 99063a56..c473ca01 100644 --- a/pages/api/twitter/signup-with-verification.js +++ b/pages/api/twitter/signup-with-verification.js @@ -8,8 +8,6 @@ import * as Constants from "~/node_common/constants"; import JWT from "jsonwebtoken"; -import { PrivateKey } from "@textile/hub"; - export default async (req, res) => { const { pin, username } = req.body.data; @@ -83,21 +81,14 @@ export default async (req, res) => { return res.status(201).send({ decorator: "SERVER_CREATE_USER_USERNAME_TAKEN" }); } - // TODO(jim): - // Single Key Textile Auth. - const identity = await PrivateKey.fromRandom(); - const textileToken = identity.toString(); + const { + textileKey, + textileToken, + textileThreadID, + textileBucketCID, + } = await Utilities.createBucket({}); - // TODO(jim): - // Don't do this once you refactor. - const { buckets, bucketKey, bucketName } = await Utilities.getBucketAPIFromUserToken({ - user: { - username: newUsername, - textileToken, - }, - }); - - if (!buckets) { + if (!textileKey || !textileToken || !textileThreadID || !textileBucketCID) { return res .status(500) .send({ decorator: "SERVER_CREATE_USER_BUCKET_INIT_FAILURE", error: true }); @@ -109,7 +100,10 @@ export default async (req, res) => { twitterId: twitterUser.id_str, twitterUsername: twitterUser.screen_name, twitterVerified: twitterUser.verified, + textileKey, textileToken, + textileThreadID, + textileBucketCID, }); if (!user) { diff --git a/pages/api/twitter/signup.js b/pages/api/twitter/signup.js index 77d1d14f..4a2be781 100644 --- a/pages/api/twitter/signup.js +++ b/pages/api/twitter/signup.js @@ -7,8 +7,6 @@ import * as SlateManager from "~/node_common/managers/slate"; import JWT from "jsonwebtoken"; -import { PrivateKey } from "@textile/hub"; - const COOKIE_NAME = "oauth_token"; export default async (req, res) => { @@ -64,22 +62,17 @@ export default async (req, res) => { return res.status(201).send({ decorator: "SERVER_CREATE_USER_USERNAME_TAKEN" }); } - // TODO(jim): - // Single Key Textile Auth. 
- const identity = await PrivateKey.fromRandom(); - const textileToken = identity.toString(); - const newUsername = username.toLowerCase(); const newEmail = email.toLowerCase(); - const { buckets, bucketKey, bucketName } = await Utilities.getBucketAPIFromUserToken({ - user: { - username: newUsername, - textileToken, - }, - }); + const { + textileKey, + textileToken, + textileThreadID, + textileBucketCID, + } = await Utilities.createBucket({}); - if (!buckets) { + if (!textileKey || !textileToken || !textileThreadID || !textileBucketCID) { return res .status(500) .send({ decorator: "SERVER_CREATE_USER_BUCKET_INIT_FAILURE", error: true }); @@ -90,8 +83,11 @@ export default async (req, res) => { email: newEmail, twitterId: twitterUser.id_str, twitterUsername: twitterUser.screen_name, - twitterVerifeid: twitterUser.verified, + twitterVerified: twitterUser.verified, + textileKey, textileToken, + textileThreadID, + textileBucketCID, }); if (!user) { diff --git a/pages/api/users/create.js b/pages/api/users/create.js index f70c654d..0f2cb6d9 100644 --- a/pages/api/users/create.js +++ b/pages/api/users/create.js @@ -9,8 +9,6 @@ import * as Monitor from "~/node_common/monitor"; import BCrypt from "bcrypt"; -import { PrivateKey } from "@textile/hub"; - export default async (req, res) => { if (!Strings.isEmpty(Environment.ALLOWED_HOST) && req.headers.host !== Environment.ALLOWED_HOST) { return res.status(403).send({ decorator: "SERVER_CREATE_USER_NOT_ALLOWED", error: true }); @@ -54,24 +52,17 @@ export default async (req, res) => { const salt = await BCrypt.genSalt(rounds); const hash = await Utilities.encryptPassword(req.body.data.password, salt); - // TODO(jim): - // Single Key Textile Auth. - const identity = await PrivateKey.fromRandom(); - const textileToken = identity.toString(); - - // TODO(jim): - // Don't do this once you refactor. 
const newUsername = req.body.data.username.toLowerCase(); const newEmail = verification.email; - const { buckets, bucketKey, bucketName } = await Utilities.getBucketAPIFromUserToken({ - user: { - username: newUsername, - textileToken, - }, - }); + const { + textileKey, + textileToken, + textileThreadID, + textileBucketCID, + } = await Utilities.createBucket({}); - if (!buckets) { + if (!textileKey || !textileToken || !textileThreadID || !textileBucketCID) { return res .status(500) .send({ decorator: "SERVER_CREATE_USER_BUCKET_INIT_FAILURE", error: true }); @@ -82,7 +73,10 @@ export default async (req, res) => { salt, username: newUsername, email: newEmail, + textileKey, textileToken, + textileThreadID, + textileBucketCID, }); if (!user) { diff --git a/pages/api/users/delete.js b/pages/api/users/delete.js index d835a98d..2d7f56c6 100644 --- a/pages/api/users/delete.js +++ b/pages/api/users/delete.js @@ -26,7 +26,7 @@ export default async (req, res) => { files = await Data.deleteFilesByUserId({ ownerId: id }); console.log({ files }); - const defaultData = await Utilities.getBucketAPIFromUserToken({ user }); + const defaultData = await Utilities.getBucket({ user }); // NOTE(jim): delete every bucket try { @@ -49,7 +49,7 @@ export default async (req, res) => { // NOTE(jim): remove orphan await Data.createOrphan({ - data: { token: user.textileToken }, + data: { token: user.textileKey }, }); // NOTE(jim): finally delete user by id (irreversible) diff --git a/pages/api/users/update.js b/pages/api/users/update.js index f19a3b6a..0ab2e445 100644 --- a/pages/api/users/update.js +++ b/pages/api/users/update.js @@ -92,10 +92,7 @@ export default async (req, res) => { if (req.body.data.type === "SAVE_DEFAULT_ARCHIVE_CONFIG") { let b; try { - b = await Utilities.getBucketAPIFromUserToken({ - user, - bucketName: "data", - }); + b = await Utilities.getBucket({ user }); } catch (e) { Logging.error(e); Social.sendTextileSlackMessage({ @@ -103,7 +100,7 @@ export default async (req, res) => { user, message: e.message, code: e.code, - functionName: `Utilities.getBucketAPIFromUserToken`, + functionName: `Utilities.getBucket`, }); return res.status(500).send({ decorator: "SERVER_NO_BUCKET_DATA", error: true }); diff --git a/pages/api/zip/get-paths.js b/pages/api/zip/get-paths.js index dc55cb4c..878cfba5 100644 --- a/pages/api/zip/get-paths.js +++ b/pages/api/zip/get-paths.js @@ -12,9 +12,7 @@ export default async (req, res) => { return res.status(403).send({ decorator: "SERVER_USER_NOT_FOUND", error: true }); } - let { buckets, bucketKey } = await Utilities.getBucketAPIFromUserToken({ - user, - }); + let { buckets, bucketKey } = await Utilities.getBucket({ user }); if (!buckets) { return res.status(500).send({ diff --git a/scenes/SceneMakeFilecoinDeal.js b/scenes/SceneMakeFilecoinDeal.js index 2e6da69c..4a9a4d9e 100644 --- a/scenes/SceneMakeFilecoinDeal.js +++ b/scenes/SceneMakeFilecoinDeal.js @@ -20,8 +20,6 @@ import Section from "~/components/core/Section"; import ScenePage from "~/components/core/ScenePage"; import ScenePageHeader from "~/components/core/ScenePageHeader"; -const STAGING_DEAL_BUCKET = "stage-deal"; - const STYLES_SPINNER_CONTAINER = css` width: 100%; height: 40vh; @@ -110,7 +108,7 @@ export default class SceneMakeFilecoinDeal extends React.Component { const file = e.target.files[i]; const response = await FileUtilities.upload({ - bucketName: STAGING_DEAL_BUCKET, + bucketName: Constants.textile.dealsBucket, file, }); } @@ -132,7 +130,7 @@ export default class SceneMakeFilecoinDeal extends 
React.Component { this.setState({ archiving: true }); const response = await Actions.archive({ - bucketName: STAGING_DEAL_BUCKET, + bucketName: Constants.textile.dealsBucket, forceEncryption: this.state.encryption, settings: { /** @@ -213,7 +211,7 @@ export default class SceneMakeFilecoinDeal extends React.Component { _handleRemove = async (cid) => { this.setState({ loading: true }); - await Actions.removeFromBucket({ bucketName: STAGING_DEAL_BUCKET, cid }); + await Actions.removeFromBucket({ bucketName: Constants.textile.dealsBucket, cid }); let networkViewer; try { diff --git a/scenes/SceneSettingsDeveloper.js b/scenes/SceneSettingsDeveloper.js index fe6d5be5..e6ae2ed6 100644 --- a/scenes/SceneSettingsDeveloper.js +++ b/scenes/SceneSettingsDeveloper.js @@ -183,10 +183,7 @@ export default class SceneSettingsDeveloper extends React.Component { } } - let userBucketCID = this.props.viewer?.userBucketCID; - if (userBucketCID) { - userBucketCID = userBucketCID.replace("/ipfs/", ""); - } + let textileBucketCID = this.props.viewer?.textileBucketCID; return ( - {userBucketCID && ( + {textileBucketCID && (
{ table.string("name").nullable(); table.string("twitterUsername").nullable(); table.boolean("twitterVerified").notNullable().defaultTo(false); - table.string("textileToken").nullable(); + table.string("textileKey").nullable(); + table.string("textileToken", 400).nullable(); + table.string("textileThreadID").nullable(); + table.string("textileBucketCID").nullable(); table.boolean("settingsDealsAutoApprove").notNullable().defaultTo(false); table.boolean("allowAutomaticDataStorage").notNullable().defaultTo(true); table.boolean("allowEncryptedDataStorage").notNullable().defaultTo(true); @@ -191,7 +194,7 @@ const migrateUserTable = async () => { name: data.name, body: data.body, photo: data.photo, - textileToken: data.tokens?.api, + textileKey: data.tokens?.api, settingsDealsAutoApprove: data.settings?.settings_deals_auto_approve, allowAutomaticDataStorage: data.settings?.allow_automatic_data_storage, allowEncryptedDataStorage: data.settings?.allow_encrypted_data_storage, @@ -343,7 +346,7 @@ Users 'data.body', -> 'body' MIGRATED 'data.photo', -> 'photo' MIGRATED 'data.status', -> 'onboarding.hidePrivacyAlert' MIGRATED - 'data.tokens.api', -> 'textileToken' MIGRATED + 'data.tokens.api', -> 'textileKey' MIGRATED 'data.settings.settings_deals_auto_approve', -> 'settingsDealsAutoApprove' MIGRATED 'data.settings.allow_automatic_data_storage', -> 'allowAutomaticDataStorage' MIGRATED 'data.settings.allow_encrypted_data_storage', -> 'allowEncryptedDataStorage' MIGRATED diff --git a/scripts/repost-migration.js b/scripts/repost-migration.js index ed533473..1c10e907 100644 --- a/scripts/repost-migration.js +++ b/scripts/repost-migration.js @@ -35,9 +35,7 @@ const saveCopyReposts = async () => { Logging.log(item); // continue; let user = { data: item.data }; - let { buckets, bucketKey, bucketRoot } = await Utilities.getBucketAPIFromUserToken({ - user, - }); + let { buckets, bucketKey, bucketRoot } = await Utilities.getBucket({ user }); try { let response = await Utilities.addExistingCIDToData({ diff --git a/scripts/seed-database.js b/scripts/seed-database.js index c5904576..1c784091 100644 --- a/scripts/seed-database.js +++ b/scripts/seed-database.js @@ -39,7 +39,10 @@ const createUsersTable = db.schema.createTable("users", function (table) { table.string("twitterId").unique().nullable(); table.string("twitterUsername").nullable(); table.boolean("twitterVerified").notNullable().defaultTo(false); - table.string("textileToken").nullable(); + table.string("textileKey").nullable(); + table.string("textileToken", 400).nullable(); + table.string("textileThreadID").nullable(); + table.string("textileBucketCID").nullable(); table.boolean("settingsDealsAutoApprove").notNullable().defaultTo(false); table.boolean("allowAutomaticDataStorage").notNullable().defaultTo(true); table.boolean("allowEncryptedDataStorage").notNullable().defaultTo(true); diff --git a/scripts/worker-heavy-stones.js b/scripts/worker-heavy-stones.js index a49636c9..8a036f3c 100644 --- a/scripts/worker-heavy-stones.js +++ b/scripts/worker-heavy-stones.js @@ -95,7 +95,7 @@ const run = async () => { await delay(500); try { - const token = user.textileToken; + const token = user.textileKey; const identity = await PrivateKey.fromString(token); buckets = await Buckets.withKeyInfo(TEXTILE_KEY_INFO); await buckets.getToken(identity);