Merge pull request #955 from filecoin-project/@martinalong/textile-speedup

textile bucketsGetOrCreate speed up
This commit is contained in:
martinalong 2021-09-24 18:21:54 -07:00 committed by GitHub
commit df9d426c18
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
28 changed files with 259 additions and 179 deletions

View File

@ -226,3 +226,9 @@ export const grids = {
export const profileDefaultPicture =
"https://slate.textile.io/ipfs/bafkreick3nscgixwfpq736forz7kzxvvhuej6kszevpsgmcubyhsx2pf7i";
export const textile = {
threadName: "buckets",
mainBucket: "data",
dealsBucket: "stage-deal",
};

View File

@ -5,12 +5,11 @@ import * as Validations from "~/common/validations";
import * as Events from "~/common/custom-events";
import * as Logging from "~/common/logging";
import * as Environment from "~/common/environment";
import * as Constants from "~/common/constants";
import { encode, isBlurhashValid } from "blurhash";
import { v4 as uuid } from "uuid";
const STAGING_DEAL_BUCKET = "stage-deal";
export const fileKey = ({ lastModified, name }) => `${lastModified}-${name}`;
const loadImage = async (src) =>
@ -138,7 +137,7 @@ export const upload = async ({ file, onProgress, bucketName, uploadAbort }) => {
let res;
if (isZipFile && isUnityFile) {
res = await _privateUploadMethod(`${zipUploadRoute}${file.name}`, file);
} else if (bucketName && bucketName === STAGING_DEAL_BUCKET) {
} else if (bucketName && bucketName === Constants.textile.dealsBucket) {
res = await _privateUploadMethod(`${storageDealRoute}${file.name}`, file);
} else {
res = await _privateUploadMethod(`${generalRoute}${file.name}`, file);

View File

@ -208,6 +208,10 @@ export const getRemainingTime = (seconds) => {
return `${value} ${unit} remaining`;
};
export const ipfsToCid = (ipfs) => {
return ipfs.replace("/ipfs/", "");
};
export const urlToCid = (url) => {
return url
.replace(`${Constants.gateways.ipfs}/`, "")

View File

@ -21,15 +21,14 @@ let UploadAbort = {
// NOTE(amine): queue utilities
const getUploadQueue = () => UploadStore.queue;
const pushToUploadQueue = ({ file, slate, bucketName }) =>
UploadStore.queue.push({ file, slate, bucketName });
const pushToUploadQueue = ({ file, slate }) => UploadStore.queue.push({ file, slate });
const resetUploadQueue = () => (UploadStore.queue = []);
const removeFromUploadQueue = ({ fileKey }) =>
(UploadStore.queue = UploadStore.queue.filter(({ file }) => getFileKey(file) !== fileKey));
// NOTE(amine): failedFilesCache utilities
const storeFileInCache = ({ file, slate, bucketName }) =>
(UploadStore.failedFilesCache[getFileKey(file)] = { file, slate, bucketName });
const storeFileInCache = ({ file, slate }) =>
(UploadStore.failedFilesCache[getFileKey(file)] = { file, slate });
const removeFileFromCache = ({ fileKey }) => delete UploadStore.failedFilesCache[fileKey];
const getFileFromCache = ({ fileKey }) => UploadStore.failedFilesCache[fileKey] || {};
@ -56,7 +55,7 @@ export function createUploadProvider({
const uploadQueue = getUploadQueue();
if (UploadStore.isUploading || uploadQueue.length === 0) return;
const { file, slate, bucketName } = getUploadQueue().shift() || {};
const { file, slate } = getUploadQueue().shift() || {};
const fileKey = getFileKey(file);
@ -88,7 +87,6 @@ export function createUploadProvider({
} else {
const response = await FileUtilities.upload({
file,
bucketName,
uploadAbort: UploadAbort,
onProgress: (e) => onProgress({ fileKey, loaded: e.loaded }),
});
@ -111,7 +109,7 @@ export function createUploadProvider({
}
}
} catch (e) {
storeFileInCache({ file, slate, bucketName });
storeFileInCache({ file, slate });
if (onError) onError({ fileKey });
Logging.error(e);
@ -129,7 +127,7 @@ export function createUploadProvider({
if (onFinish) onFinish();
};
const addToUploadQueue = ({ files, slate, bucketName }) => {
const addToUploadQueue = ({ files, slate }) => {
if (!files || !files.length) return;
for (let i = 0; i < files.length; i++) {
@ -145,7 +143,7 @@ export function createUploadProvider({
if (fileKey in UploadStore.failedFilesCache) removeFileFromCache({ fileKey });
if (onAddedToQueue) onAddedToQueue(files[i]);
pushToUploadQueue({ file: files[i], slate, bucketName });
pushToUploadQueue({ file: files[i], slate });
}
const isQueueEmpty = getUploadQueue().length === 0;
@ -156,12 +154,12 @@ export function createUploadProvider({
};
const retry = ({ fileKey }) => {
const { file, slate, bucketName } = getFileFromCache({ fileKey });
const { file, slate } = getFileFromCache({ fileKey });
if (file.type === "link") {
addLinkToUploadQueue({ url: file.name, slate });
return;
}
addToUploadQueue({ files: [file], slate, bucketName });
addToUploadQueue({ files: [file], slate });
};
const cancel = ({ fileKey }) => {

View File

@ -424,6 +424,7 @@ export default class ApplicationPage extends React.Component {
};
render() {
// console.log(this.state.viewer);
let page = this.state.page;
if (!page?.id) {
page = NavigationData.getById(null, this.state.viewer);
@ -457,7 +458,6 @@ export default class ApplicationPage extends React.Component {
isMobile: this.state.isMobile,
isMac: this.props.isMac,
activeUsers: this.state.activeUsers,
userBucketCID: this.state.userBucketCID,
external: !!!this.state.viewer,
});

View File

@ -17,3 +17,9 @@ export const MIN_ARCHIVE_SIZE_BYTES = 104857600;
// NOTE(amine): 15 minutes
export const TOKEN_EXPIRATION_TIME = 2 * 60 * 60 * 1000;
export const textile = {
threadName: "buckets",
mainBucket: "data",
dealsBucket: "stage-deal",
};

View File

@ -3,10 +3,12 @@ import * as Serializers from "~/node_common/serializers";
import { runQuery } from "~/node_common/data/utilities";
export default async (user) => {
console.log("inside update user by id");
return await runQuery({
label: "UPDATE_USER_BY_ID",
queryFn: async (DB) => {
const query = await DB.from("users").where("id", user.id).update(user).returning("*");
console.log({ query });
const index = query ? query.pop() : null;
return JSON.parse(JSON.stringify(index));

View File

@ -12,8 +12,6 @@ import * as Logging from "~/common/logging";
import WebSocket from "ws";
const STAGING_DEAL_BUCKET = "stage-deal";
const websocketSend = async (type, data) => {
if (Strings.isEmpty(Environment.PUBSUB_SECRET)) {
return;
@ -143,18 +141,20 @@ export const getById = async ({ id }) => {
return null;
}
delete user.password;
delete user.salt;
Data.createUsageStat({ id }); //NOTE(martina): to record the person's usage of Slate for analytics
// user.library = await Data.getFilesByUserId({ id });
const [slates, keys, subscriptions, following, followers, { bucketRoot }] = (
const [slates, keys, subscriptions, following, followers] = (
await Promise.allSettled([
Data.getSlatesByUserId({ ownerId: id, includeFiles: true }),
Data.getAPIKeysByUserId({ userId: id }),
Data.getSubscriptionsByUserId({ ownerId: id }),
Data.getFollowingByUserId({ ownerId: id }),
Data.getFollowersByUserId({ userId: id }),
Utilities.getBucketAPIFromUserToken({ user }),
])
).map((item) => item.value);
@ -217,7 +217,7 @@ export const getById = async ({ id }) => {
pdfBytes,
},
// tags,
userBucketCID: bucketRoot?.path || null,
// userBucketCID: bucketRoot?.path || null,
keys,
slates,
subscriptions,
@ -245,9 +245,7 @@ export const getDealHistory = async ({ id }) => {
let deals = [];
try {
const FilecoinSingleton = await Utilities.getFilecoinAPIFromUserToken({
user,
});
const FilecoinSingleton = await Utilities.getBucket({ user });
const { filecoin } = FilecoinSingleton;
const records = await filecoin.storageDealRecords({
@ -312,10 +310,9 @@ export const getTextileById = async ({ id }) => {
}
// NOTE(jim): This bucket is purely for staging data for other deals.
const stagingData = await Utilities.getBucketAPIFromUserToken({
const stagingData = await Utilities.getBucket({
user,
bucketName: STAGING_DEAL_BUCKET,
encrypted: false,
bucketName: Constants.textile.dealsBucket,
});
const FilecoinSingleton = await Utilities.getFilecoinAPIFromUserToken({
@ -361,7 +358,7 @@ export const getTextileById = async ({ id }) => {
}
let items = null;
const dealBucket = r.find((bucket) => bucket.name === STAGING_DEAL_BUCKET);
const dealBucket = r.find((bucket) => bucket.name === Constants.textile.dealsBucket);
try {
const path = await stagingData.buckets.listPath(dealBucket.key, "/");
items = path.item.items;
@ -375,11 +372,7 @@ export const getTextileById = async ({ id }) => {
});
}
const b = await Utilities.getBucketAPIFromUserToken({
user,
bucketName: "data",
encrypted: false,
});
const b = await Utilities.getBucket({ user });
const settings = await b.buckets.defaultArchiveConfig(b.bucketKey);

View File

@ -18,7 +18,10 @@
// name: user.name,
// twitterUsername: user.twitterUsername,
// twitterVerified: user.twitterVerified,
// textileKey: user.textileKey,
// textileToken: user.textileToken,
// textileThreadID: user.textileThreadID,
// textileBucketCID: user.textileBucketCID,
// settingsDealAutoApprove: user.settingsDealAutoApprove,
// allowAutomaticDataStorage: user.allowAutomaticDataStorage,
// allowEncryptedDataStorage: user.allowEncryptedDataStorage,

View File

@ -18,8 +18,6 @@ const ENCRYPTION_IV = crypto.randomBytes(16);
import { Buckets, PrivateKey, Filecoin, Client, ThreadID } from "@textile/hub";
const BUCKET_NAME = "data";
const TEXTILE_KEY_INFO = {
key: Environment.TEXTILE_HUB_KEY,
secret: Environment.TEXTILE_HUB_SECRET,
@ -117,8 +115,8 @@ export const parseAuthHeader = (value) => {
};
export const getFilecoinAPIFromUserToken = async ({ user }) => {
const token = user.textileToken;
const identity = await PrivateKey.fromString(token);
const textileKey = user.textileKey;
const identity = await PrivateKey.fromString(textileKey);
const filecoin = await Filecoin.withKeyInfo(TEXTILE_KEY_INFO);
await filecoin.getToken(identity);
@ -161,43 +159,154 @@ export const addExistingCIDToData = async ({ buckets, key, path, cid }) => {
};
// NOTE(jim): Requires @textile/hub
export const getBucketAPIFromUserToken = async ({ user, bucketName, encrypted = false }) => {
const token = user.textileToken;
const name = Strings.isEmpty(bucketName) ? BUCKET_NAME : bucketName;
const identity = await PrivateKey.fromString(token);
// export const getBucketAPIFromUserToken = async ({
// user,
// bucketName = Constants.textile.mainBucket,
// encrypted = false,
// }) => {
// const token = user.textileToken;
// const name = bucketName;
// const identity = await PrivateKey.fromString(token);
// let buckets = await Buckets.withKeyInfo(TEXTILE_KEY_INFO);
// const textileToken = await buckets.getToken(identity);
// let root = null;
// Logging.log(`buckets.getOrCreate() init ${name}`);
// try {
// Logging.log("before buckets get or create");
// const created = await buckets.getOrCreate(name, { encrypted });
// Logging.log("after buckets get or create");
// root = created.root;
// } catch (e) {
// Logging.log(`buckets.getOrCreate() warning: ${e.message}`);
// Social.sendTextileSlackMessage({
// file: "/node_common/utilities.js",
// user,
// message: e.message,
// code: e.code,
// functionName: `buckets.getOrCreate`,
// });
// }
// if (!root) {
// Logging.error(`buckets.getOrCreate() failed for ${name}`);
// return { buckets: null, bucketKey: null, bucketRoot: null };
// }
// Logging.log(`buckets.getOrCreate() success for ${name}`);
// return {
// buckets,
// bucketKey: root.key,
// bucketRoot: root,
// bucketName: name,
// };
// };
//NOTE(martina): only use this upon creating a new user. This creates their bucket without checking for an existing bucket
export const createBucket = async ({
bucketName = Constants.textile.mainBucket,
encrypted = false,
}) => {
try {
const identity = await PrivateKey.fromRandom();
const textileKey = identity.toString();
let buckets = await Buckets.withKeyInfo(TEXTILE_KEY_INFO);
await buckets.getToken(identity);
const textileToken = await buckets.getToken(identity);
buckets.context.withToken(textileToken);
let root = null;
Logging.log(`buckets.getOrCreate() init ${name}`);
try {
Logging.log("before buckets get or create");
const created = await buckets.getOrCreate(name, { encrypted });
Logging.log("after buckets get or create");
root = created.root;
} catch (e) {
Logging.log(`buckets.getOrCreate() warning: ${e.message}`);
Social.sendTextileSlackMessage({
file: "/node_common/utilities.js",
user,
message: e.message,
code: e.code,
functionName: `buckets.getOrCreate`,
const client = new Client(buckets.context);
const newId = ThreadID.fromRandom();
await client.newDB(newId, Constants.textile.threadName);
const textileThreadID = newId.toString();
buckets.context.withThread(textileThreadID);
const created = await buckets.create(bucketName, { encrypted });
let ipfs = created.root.path;
const textileBucketCID = Strings.ipfsToCid(ipfs);
console.log({
textileKey,
textileToken,
textileThreadID,
textileBucketCID,
buckets,
bucketKey: created.root.key,
bucketRoot: created.root,
bucketName,
});
return {
textileKey,
textileToken,
textileThreadID,
textileBucketCID,
buckets,
bucketKey: created.root.key,
bucketRoot: created.root,
bucketName,
};
} catch (e) {
Logging.error(e?.message);
}
};
//NOTE(martina): only use this for existing users. This grabs their bucket without checking for an existing bucket
export const getBucket = async ({ user, bucketName = Constants.textile.mainBucket }) => {
let updateUser = false;
let { textileKey, textileToken, textileThreadID, textileBucketCID } = user;
if (!textileKey) {
return await createBucket({ user, bucketName });
}
if (!root) {
Logging.error(`buckets.getOrCreate() failed for ${name}`);
return { buckets: null, bucketKey: null, bucketRoot: null };
let buckets = await Buckets.withKeyInfo(TEXTILE_KEY_INFO);
if (!textileToken) {
const identity = PrivateKey.fromString(textileKey);
textileToken = await buckets.getToken(identity);
updateUser = true;
}
buckets.context.withToken(textileToken);
if (!textileThreadID) {
const client = new Client(buckets.context);
const res = await client.getThread(Constants.textile.threadName);
textileThreadID = typeof res.id === "string" ? res.id : ThreadID.fromBytes(res.id).toString();
updateUser = true;
}
buckets.context.withThread(textileThreadID);
const roots = await buckets.list();
const existing = roots.find((bucket) => bucket.name === bucketName);
if (!existing) {
return { buckets: null, bucketKey: null, bucketRoot: null, bucketName };
}
Logging.log(`buckets.getOrCreate() success for ${name}`);
if (!textileBucketCID) {
let ipfs = existing.path;
textileBucketCID = Strings.ipfsToCid(ipfs);
updateUser = true;
}
if (updateUser) {
Data.updateUserById({ id: user.id, textileToken, textileThreadID, textileBucketCID });
}
console.log({
buckets,
bucketKey: existing.key,
bucketRoot: existing,
bucketName,
});
return {
buckets,
bucketKey: root.key,
bucketRoot: root,
bucketName: name,
bucketKey: existing.key,
bucketRoot: existing,
bucketName,
};
};

View File

@ -1,4 +1,4 @@
import * as Data from "~/node_common/data";
import * as Constants from "~/node_common/constants";
import * as Utilities from "~/node_common/utilities";
import * as Social from "~/node_common/social";
import * as Strings from "~/common/strings";
@ -8,19 +8,17 @@ import * as RequestUtilities from "~/node_common/request-utilities";
import { v4 as uuid } from "uuid";
import { MAX_BUCKET_COUNT, MIN_ARCHIVE_SIZE_BYTES } from "~/node_common/constants";
const STAGING_DEAL_BUCKET = "stage-deal";
export default async (req, res) => {
const userInfo = await RequestUtilities.checkAuthorizationInternal(req, res);
if (!userInfo) return;
const { id, user } = userInfo;
let bucketName = null;
let bucketName = Constants.textile.dealsBucket;
if (req.body.data && req.body.data.bucketName) {
bucketName = req.body.data.bucketName;
}
const { buckets, bucketKey, bucketRoot } = await Utilities.getBucketAPIFromUserToken({
const { buckets, bucketKey } = await Utilities.getBucket({
user,
bucketName,
});
@ -37,7 +35,7 @@ export default async (req, res) => {
let items = null;
let bucketSizeBytes = 0;
try {
const path = await buckets.listPath(bucketRoot.key, "/");
const path = await buckets.listPath(bucketKey, "/");
items = path.item;
bucketSizeBytes = path.item.size;
} catch (e) {
@ -108,7 +106,7 @@ export default async (req, res) => {
// NOTE(jim): Either encrypt the bucket or don't encrypt the bucket.
let encryptThisDeal = false;
if (bucketName !== STAGING_DEAL_BUCKET && user.allowEncryptedDataStorage) {
if (bucketName !== Constants.textile.dealsBucket && user.allowEncryptedDataStorage) {
encryptThisDeal = true;
}
@ -116,7 +114,7 @@ export default async (req, res) => {
encryptThisDeal = true;
}
let key = bucketRoot.key;
let key = bucketKey;
let encryptedBucketName = null;
if (user.allowEncryptedDataStorage || req.body.data.forceEncryption) {
encryptedBucketName = req.body.data.forceEncryption

View File

@ -13,7 +13,7 @@ export default async (req, res) => {
return res.status(500).send({ decorator: "SERVER_BUCKET_REMOVE_NO_CID", error: true });
}
const { buckets, bucketKey } = await Utilities.getBucketAPIFromUserToken({
const { buckets, bucketKey } = await Utilities.getBucket({
user,
bucketName: req.body.data.bucketName,
});

View File

@ -1,3 +1,4 @@
import * as Constants from "~/node_common/constants";
import * as Data from "~/node_common/data";
import * as Utilities from "~/node_common/utilities";
import * as Arrays from "~/common/arrays";
@ -7,8 +8,6 @@ import * as ViewerManager from "~/node_common/managers/viewer";
import * as SearchManager from "~/node_common/managers/search";
import * as RequestUtilities from "~/node_common/request-utilities";
const DEFAULT_BUCKET_NAME = "data";
export default async (req, res) => {
const userInfo = await RequestUtilities.checkAuthorizationInternal(req, res);
if (!userInfo) return;
@ -25,9 +24,7 @@ export default async (req, res) => {
return res.status(400).send({ decorator: "SERVER_REMOVE_DATA_NO_IDS", error: true });
}
const { buckets, bucketKey } = await Utilities.getBucketAPIFromUserToken({
user,
});
const { buckets, bucketKey } = await Utilities.getBucket({ user });
if (!buckets) {
return res.status(500).send({
@ -58,7 +55,7 @@ export default async (req, res) => {
let items = [];
try {
for (let i = 0; i < r.length; i++) {
if (r[i].name === DEFAULT_BUCKET_NAME) {
if (r[i].name === Constants.textile.mainBucket) {
const next = await buckets.listPath(r[i].key, "/");
const set = next.item.items;
items = [...set, ...items];

View File

@ -9,9 +9,7 @@ export default async (req, res) => {
if (!userInfo) return;
const { id, user } = userInfo;
const { buckets, bucketKey } = await Utilities.getBucketAPIFromUserToken({
user,
});
const { buckets, bucketKey } = await Utilities.getBucket({ user });
if (!buckets) {
return res.status(500).send({ decorator: "SERVER_NO_BUCKET_DATA", error: true });

View File

@ -17,9 +17,7 @@ export default async (req, res) => {
let decorator = "SERVER_SAVE_COPY";
let { buckets, bucketKey, bucketRoot } = await Utilities.getBucketAPIFromUserToken({
user,
});
let { buckets, bucketKey, bucketRoot } = await Utilities.getBucket({ user });
if (!buckets) {
return res.status(500).send({

View File

@ -8,8 +8,6 @@ import * as Constants from "~/node_common/constants";
import JWT from "jsonwebtoken";
import { PrivateKey } from "@textile/hub";
export default async (req, res) => {
const { pin, username } = req.body.data;
@ -83,21 +81,14 @@ export default async (req, res) => {
return res.status(201).send({ decorator: "SERVER_CREATE_USER_USERNAME_TAKEN" });
}
// TODO(jim):
// Single Key Textile Auth.
const identity = await PrivateKey.fromRandom();
const textileToken = identity.toString();
// TODO(jim):
// Don't do this once you refactor.
const { buckets, bucketKey, bucketName } = await Utilities.getBucketAPIFromUserToken({
user: {
username: newUsername,
const {
textileKey,
textileToken,
},
});
textileThreadID,
textileBucketCID,
} = await Utilities.createBucket({});
if (!buckets) {
if (!textileKey || !textileToken || !textileThreadID || !textileBucketCID) {
return res
.status(500)
.send({ decorator: "SERVER_CREATE_USER_BUCKET_INIT_FAILURE", error: true });
@ -109,7 +100,10 @@ export default async (req, res) => {
twitterId: twitterUser.id_str,
twitterUsername: twitterUser.screen_name,
twitterVerified: twitterUser.verified,
textileKey,
textileToken,
textileThreadID,
textileBucketCID,
});
if (!user) {

View File

@ -7,8 +7,6 @@ import * as SlateManager from "~/node_common/managers/slate";
import JWT from "jsonwebtoken";
import { PrivateKey } from "@textile/hub";
const COOKIE_NAME = "oauth_token";
export default async (req, res) => {
@ -64,22 +62,17 @@ export default async (req, res) => {
return res.status(201).send({ decorator: "SERVER_CREATE_USER_USERNAME_TAKEN" });
}
// TODO(jim):
// Single Key Textile Auth.
const identity = await PrivateKey.fromRandom();
const textileToken = identity.toString();
const newUsername = username.toLowerCase();
const newEmail = email.toLowerCase();
const { buckets, bucketKey, bucketName } = await Utilities.getBucketAPIFromUserToken({
user: {
username: newUsername,
const {
textileKey,
textileToken,
},
});
textileThreadID,
textileBucketCID,
} = await Utilities.createBucket({});
if (!buckets) {
if (!textileKey || !textileToken || !textileThreadID || !textileBucketCID) {
return res
.status(500)
.send({ decorator: "SERVER_CREATE_USER_BUCKET_INIT_FAILURE", error: true });
@ -90,8 +83,11 @@ export default async (req, res) => {
email: newEmail,
twitterId: twitterUser.id_str,
twitterUsername: twitterUser.screen_name,
twitterVerifeid: twitterUser.verified,
twitterVerified: twitterUser.verified,
textileKey,
textileToken,
textileThreadID,
textileBucketCID,
});
if (!user) {

View File

@ -9,8 +9,6 @@ import * as Monitor from "~/node_common/monitor";
import BCrypt from "bcrypt";
import { PrivateKey } from "@textile/hub";
export default async (req, res) => {
if (!Strings.isEmpty(Environment.ALLOWED_HOST) && req.headers.host !== Environment.ALLOWED_HOST) {
return res.status(403).send({ decorator: "SERVER_CREATE_USER_NOT_ALLOWED", error: true });
@ -54,24 +52,17 @@ export default async (req, res) => {
const salt = await BCrypt.genSalt(rounds);
const hash = await Utilities.encryptPassword(req.body.data.password, salt);
// TODO(jim):
// Single Key Textile Auth.
const identity = await PrivateKey.fromRandom();
const textileToken = identity.toString();
// TODO(jim):
// Don't do this once you refactor.
const newUsername = req.body.data.username.toLowerCase();
const newEmail = verification.email;
const { buckets, bucketKey, bucketName } = await Utilities.getBucketAPIFromUserToken({
user: {
username: newUsername,
const {
textileKey,
textileToken,
},
});
textileThreadID,
textileBucketCID,
} = await Utilities.createBucket({});
if (!buckets) {
if (!textileKey || !textileToken || !textileThreadID || !textileBucketCID) {
return res
.status(500)
.send({ decorator: "SERVER_CREATE_USER_BUCKET_INIT_FAILURE", error: true });
@ -82,7 +73,10 @@ export default async (req, res) => {
salt,
username: newUsername,
email: newEmail,
textileKey,
textileToken,
textileThreadID,
textileBucketCID,
});
if (!user) {

View File

@ -26,7 +26,7 @@ export default async (req, res) => {
files = await Data.deleteFilesByUserId({ ownerId: id });
console.log({ files });
const defaultData = await Utilities.getBucketAPIFromUserToken({ user });
const defaultData = await Utilities.getBucket({ user });
// NOTE(jim): delete every bucket
try {
@ -49,7 +49,7 @@ export default async (req, res) => {
// NOTE(jim): remove orphan
await Data.createOrphan({
data: { token: user.textileToken },
data: { token: user.textileKey },
});
// NOTE(jim): finally delete user by id (irreversible)

View File

@ -92,10 +92,7 @@ export default async (req, res) => {
if (req.body.data.type === "SAVE_DEFAULT_ARCHIVE_CONFIG") {
let b;
try {
b = await Utilities.getBucketAPIFromUserToken({
user,
bucketName: "data",
});
b = await Utilities.getBucket({ user });
} catch (e) {
Logging.error(e);
Social.sendTextileSlackMessage({
@ -103,7 +100,7 @@ export default async (req, res) => {
user,
message: e.message,
code: e.code,
functionName: `Utilities.getBucketAPIFromUserToken`,
functionName: `Utilities.getBucket`,
});
return res.status(500).send({ decorator: "SERVER_NO_BUCKET_DATA", error: true });

View File

@ -12,9 +12,7 @@ export default async (req, res) => {
return res.status(403).send({ decorator: "SERVER_USER_NOT_FOUND", error: true });
}
let { buckets, bucketKey } = await Utilities.getBucketAPIFromUserToken({
user,
});
let { buckets, bucketKey } = await Utilities.getBucket({ user });
if (!buckets) {
return res.status(500).send({

View File

@ -20,8 +20,6 @@ import Section from "~/components/core/Section";
import ScenePage from "~/components/core/ScenePage";
import ScenePageHeader from "~/components/core/ScenePageHeader";
const STAGING_DEAL_BUCKET = "stage-deal";
const STYLES_SPINNER_CONTAINER = css`
width: 100%;
height: 40vh;
@ -110,7 +108,7 @@ export default class SceneMakeFilecoinDeal extends React.Component {
const file = e.target.files[i];
const response = await FileUtilities.upload({
bucketName: STAGING_DEAL_BUCKET,
bucketName: Constants.textile.dealsBucket,
file,
});
}
@ -132,7 +130,7 @@ export default class SceneMakeFilecoinDeal extends React.Component {
this.setState({ archiving: true });
const response = await Actions.archive({
bucketName: STAGING_DEAL_BUCKET,
bucketName: Constants.textile.dealsBucket,
forceEncryption: this.state.encryption,
settings: {
/**
@ -213,7 +211,7 @@ export default class SceneMakeFilecoinDeal extends React.Component {
_handleRemove = async (cid) => {
this.setState({ loading: true });
await Actions.removeFromBucket({ bucketName: STAGING_DEAL_BUCKET, cid });
await Actions.removeFromBucket({ bucketName: Constants.textile.dealsBucket, cid });
let networkViewer;
try {

View File

@ -183,10 +183,7 @@ export default class SceneSettingsDeveloper extends React.Component {
}
}
let userBucketCID = this.props.viewer?.userBucketCID;
if (userBucketCID) {
userBucketCID = userBucketCID.replace("/ipfs/", "");
}
let textileBucketCID = this.props.viewer?.textileBucketCID;
return (
<WebsitePrototypeWrapper
@ -240,7 +237,7 @@ export default class SceneSettingsDeveloper extends React.Component {
files to Slate. You can have a maximum of 10 keys at any given time.
</ScenePageHeader>
{userBucketCID && (
{textileBucketCID && (
<div style={{ marginTop: 34, marginBottom: 24 }}>
<System.DescriptionGroup
style={{ maxWidth: 640 }}
@ -250,7 +247,7 @@ export default class SceneSettingsDeveloper extends React.Component {
}
/>
<input
value={this.state.copying ? "Copied!" : userBucketCID}
value={this.state.copying ? "Copied!" : textileBucketCID}
css={STYLES_API_KEY}
style={{ textOverflow: "ellipsis" }}
type="text"

View File

@ -11,27 +11,18 @@ const db = knex(envConfig);
Logging.log(`RUNNING: adjust.js`);
const addNewFieldsLinks = db.schema.table("files", function (table) {
table.string("url").nullable();
table.boolean("isLink").notNullable().defaultTo(false);
const renameExistingColumn = db.schema.table("users", function (table) {
table.renameColumn("textileToken", "textileKey");
});
const addNewFieldsFiles = db.schema.table("files", function (table) {
table.string("type").nullable();
table.integer("size").notNullable().defaultTo(0);
table.string("name").nullable();
table.string("body").nullable();
table.jsonb("coverImage").nullable();
table.string("author").nullable();
table.string("source").nullable();
const addNewColumns = db.schema.table("users", function (table) {
table.string("textileToken").nullable();
table.string("textileThreadID").nullable();
table.string("textileBucketCID").nullable();
});
const addNewFieldsUsers = db.schema.table("users", function (table) {
table.string("name").nullable();
table.string("body").nullable();
table.string("photo").nullable();
table.string("twitter").nullable();
table.boolean("twitterVerified").notNullable().defaultTo(false);
const editColumnLength = db.schema.table("users", function (table) {
table.string("textileToken", 400).nullable().alter();
});
const addNewFieldsSlates = db.schema.table("slates", function (table) {
@ -40,7 +31,7 @@ const addNewFieldsSlates = db.schema.table("slates", function (table) {
table.string("preview").nullable();
});
Promise.all([addNewFieldsLinks]);
Promise.all([editColumnLength]);
Logging.log(`FINISHED: adjust.js`);
Logging.log(` CTRL +C to return to terminal.`);

View File

@ -118,7 +118,10 @@ const addUserColumns = async () => {
table.string("name").nullable();
table.string("twitterUsername").nullable();
table.boolean("twitterVerified").notNullable().defaultTo(false);
table.string("textileToken").nullable();
table.string("textileKey").nullable();
table.string("textileToken", 400).nullable();
table.string("textileThreadID").nullable();
table.string("textileBucketCID").nullable();
table.boolean("settingsDealsAutoApprove").notNullable().defaultTo(false);
table.boolean("allowAutomaticDataStorage").notNullable().defaultTo(true);
table.boolean("allowEncryptedDataStorage").notNullable().defaultTo(true);
@ -191,7 +194,7 @@ const migrateUserTable = async () => {
name: data.name,
body: data.body,
photo: data.photo,
textileToken: data.tokens?.api,
textileKey: data.tokens?.api,
settingsDealsAutoApprove: data.settings?.settings_deals_auto_approve,
allowAutomaticDataStorage: data.settings?.allow_automatic_data_storage,
allowEncryptedDataStorage: data.settings?.allow_encrypted_data_storage,
@ -343,7 +346,7 @@ Users
'data.body', -> 'body' MIGRATED
'data.photo', -> 'photo' MIGRATED
'data.status', -> 'onboarding.hidePrivacyAlert' MIGRATED
'data.tokens.api', -> 'textileToken' MIGRATED
'data.tokens.api', -> 'textileKey' MIGRATED
'data.settings.settings_deals_auto_approve', -> 'settingsDealsAutoApprove' MIGRATED
'data.settings.allow_automatic_data_storage', -> 'allowAutomaticDataStorage' MIGRATED
'data.settings.allow_encrypted_data_storage', -> 'allowEncryptedDataStorage' MIGRATED

View File

@ -35,9 +35,7 @@ const saveCopyReposts = async () => {
Logging.log(item);
// continue;
let user = { data: item.data };
let { buckets, bucketKey, bucketRoot } = await Utilities.getBucketAPIFromUserToken({
user,
});
let { buckets, bucketKey, bucketRoot } = await Utilities.getBucket({ user });
try {
let response = await Utilities.addExistingCIDToData({

View File

@ -39,7 +39,10 @@ const createUsersTable = db.schema.createTable("users", function (table) {
table.string("twitterId").unique().nullable();
table.string("twitterUsername").nullable();
table.boolean("twitterVerified").notNullable().defaultTo(false);
table.string("textileToken").nullable();
table.string("textileKey").nullable();
table.string("textileToken", 400).nullable();
table.string("textileThreadID").nullable();
table.string("textileBucketCID").nullable();
table.boolean("settingsDealsAutoApprove").notNullable().defaultTo(false);
table.boolean("allowAutomaticDataStorage").notNullable().defaultTo(true);
table.boolean("allowEncryptedDataStorage").notNullable().defaultTo(true);

View File

@ -95,7 +95,7 @@ const run = async () => {
await delay(500);
try {
const token = user.textileToken;
const token = user.textileKey;
const identity = await PrivateKey.fromString(token);
buckets = await Buckets.withKeyInfo(TEXTILE_KEY_INFO);
await buckets.getToken(identity);