Mirror of https://github.com/Qortal/q-support.git, synced 2025-02-11 17:55:50 +00:00

Merge pull request #11 from QortalSeth/main

Publishing now uses a file instead of a Base64-encoded string. This will reduce load times by about one third on publishes made after this update.
Qortal Dev 2024-07-10 12:48:05 -06:00 committed by GitHub
commit 9434eef241
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 224 additions and 213 deletions
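
The change is the same pattern in every touched component: PUBLISH_QDN_RESOURCE calls that used to Base64-encode their payload and send it as data64 now wrap the payload in a File and send it as file, using the new helpers in PublishFormatter.ts. A minimal before/after sketch, assuming the globally available qortalRequest API the app already uses; issueObject, name, and identifier are illustrative placeholders, and the import path depends on the calling file:

// Sketch only; adjust the relative import path to the calling file.
import { objectToBase64, objectToFile } from "../../utils/PublishFormatter.ts";

// Before this PR: encode the object to Base64 and publish it via data64.
const data64 = await objectToBase64(issueObject);
await qortalRequest({
  action: "PUBLISH_QDN_RESOURCE",
  name: name,
  service: "DOCUMENT",
  identifier: identifier,
  data64: data64,
});

// After this PR: wrap the object in a File and publish it via file,
// skipping the Base64 encode/decode round trip.
await qortalRequest({
  action: "PUBLISH_QDN_RESOURCE",
  name: name,
  service: "DOCUMENT",
  identifier: identifier,
  file: objectToFile(issueObject),
});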

View File

@@ -32,7 +32,7 @@ import {
 import { setNotification } from "../../state/features/notificationsSlice.ts";
 import { RootState } from "../../state/store.ts";
 import { BountyData, validateBountyInput } from "../../utils/qortalRequests.ts";
-import { objectToBase64 } from "../../utils/toBase64.js";
+import { objectToBase64, objectToFile } from "../../utils/PublishFormatter.ts";
 import { isNumber } from "../../utils/utilFunctions.ts";
 import {
   AutocompleteQappNames,
@@ -360,13 +360,12 @@ export const EditIssue = () => {
      if (log)
        console.log("% of characters used:", metadescription.length / 240);
-     const fileObjectToBase64 = await objectToBase64(issueObject);
      // Description is obtained from raw data
      const requestBodyJson: any = {
        action: "PUBLISH_QDN_RESOURCE",
        name: name,
        service: "DOCUMENT",
-       data64: fileObjectToBase64,
+       file: objectToFile(issueObject),
        title: title.slice(0, 50),
        description: metadescription,
        identifier: editIssueProperties.id,

View File

@@ -27,7 +27,7 @@ import ShortUniqueId from "short-unique-id";
 import { useDispatch, useSelector } from "react-redux";
 import { setNotification } from "../../state/features/notificationsSlice";
-import { objectToBase64 } from "../../utils/toBase64";
+import { objectToBase64, objectToFile } from "../../utils/PublishFormatter.ts";
 import { RootState } from "../../state/store";
 import {
   setEditPlaylist,
@@ -159,14 +159,16 @@ export const EditPlaylist = () => {
          const responseDataSearchVid = await response.json();
          if (responseDataSearchVid?.length > 0) {
-           let resourceData2 = responseDataSearchVid[0];
+           const resourceData2 = responseDataSearchVid[0];
            videos.push(resourceData2);
          }
        }
      }
      combinedData.videos = videos;
      setPlaylistData(combinedData);
-   } catch (error) {}
+   } catch (error) {
+     console.log(error);
+   }
  }, []);
  useEffect(() => {
@@ -266,13 +268,13 @@ export const EditPlaylist = () => {
      if (!descriptionVid) throw new Error("cannot find video code");
      // Split the string by ';'
-     let parts = descriptionVid.split(";");
+     const parts = descriptionVid.split(";");
      // Initialize a variable to hold the code value
      let codeValue = "";
      // Loop through the parts to find the one that starts with 'code:'
-     for (let part of parts) {
+     for (const part of parts) {
        if (part.startsWith("code:")) {
          codeValue = part.split(":")[1];
          break;
@@ -309,11 +311,10 @@ export const EditPlaylist = () => {
    };
    const codes = videoStructured.map(item => `c:${item.code};`).join("");
-   let metadescription =
+   const metadescription =
      `**category:${category};subcategory:${subcategory};${codes}**` +
      stringDescription.slice(0, 120);
-   const crowdfundObjectToBase64 = await objectToBase64(playlistObject);
    // Description is obtained from raw data
    let identifier = editVideoProperties?.id;
@@ -325,7 +326,7 @@ export const EditPlaylist = () => {
      action: "PUBLISH_QDN_RESOURCE",
      name: username,
      service: "PLAYLIST",
-     data64: crowdfundObjectToBase64,
+     file: objectToFile(playlistObject),
      title: title.slice(0, 50),
      description: metadescription,
      identifier: identifier,

View File

@@ -35,7 +35,7 @@ import { ThemeButtonBright } from "../../pages/Home/Home-styles.tsx";
 import { setNotification } from "../../state/features/notificationsSlice";
 import { RootState } from "../../state/store";
 import { BountyData, validateBountyInput } from "../../utils/qortalRequests.ts";
-import { objectToBase64 } from "../../utils/toBase64";
+import { objectToBase64, objectToFile } from "../../utils/PublishFormatter.ts";
 import { isNumber } from "../../utils/utilFunctions.ts";
 import {
   AutocompleteQappNames,
@@ -325,13 +325,12 @@ export const PublishIssue = ({ editId, editContent }: NewCrowdfundProps) => {
      if (log)
        console.log("% of characters used:", metadescription.length / 240);
-     const fileObjectToBase64 = await objectToBase64(issueObject);
      // Description is obtained from raw data
      const requestBodyJson: any = {
        action: "PUBLISH_QDN_RESOURCE",
        name: name,
        service: "DOCUMENT",
-       data64: fileObjectToBase64,
+       file: objectToFile(issueObject),
        title: title.slice(0, 50),
        description: metadescription,
        identifier: identifier + "_metadata",

View File

@@ -239,7 +239,7 @@ const CommentCard = ({
        </StyledCardColComment>
      </StyledCardHeaderComment>
      <StyledCardContentComment>
-       <StyledCardComment>{message}</StyledCardComment>
+       <StyledCardComment paragraph={false}>{message}</StyledCardComment>
      </StyledCardContentComment>
      <Box
        sx={{

View File

@@ -4,6 +4,7 @@ import { RootState } from "../../../state/store";
 import ShortUniqueId from "short-unique-id";
 import { setNotification } from "../../../state/features/notificationsSlice";
 import localforage from "localforage";
+import { stringToFile } from "../../../utils/PublishFormatter.ts";
 import {
   CommentInput,
   CommentInputContainer,
@@ -33,11 +34,13 @@ export interface Item {
 export async function addItem(item: Item): Promise<void> {
   // Get all items
-  let notificationComments: Item[] =
+  const notificationComments: Item[] =
     (await notification.getItem("comments")) || [];
   // Find the item with the same id, if it exists
-  let existingItemIndex = notificationComments.findIndex(i => i.id === item.id);
+  const existingItemIndex = notificationComments.findIndex(
+    i => i.id === item.id
+  );
   if (existingItemIndex !== -1) {
     // If the item exists, update its date
@@ -58,10 +61,10 @@ export async function addItem(item: Item): Promise<void> {
 }
 export async function updateItemDate(item: any): Promise<void> {
   // Get all items
-  let notificationComments: Item[] =
+  const notificationComments: Item[] =
     (await notification.getItem("comments")) || [];
-  let notificationCreatorComment: any =
+  const notificationCreatorComment: any =
     (await notification.getItem("post-comments")) || {};
   const findPostId = notificationCreatorComment[item.postId];
   if (findPostId) {
@@ -124,11 +127,9 @@ export const CommentEditor = ({
    identifier: string,
    idForNotification?: string
  ) => {
-   let address;
-   let name;
+   const address = user?.address;
+   const name = user?.name || "";
    let errorMsg = "";
-   address = user?.address;
-   name = user?.name || "";
    const notificationMessage = `This is an automated Q-Support notification indicating that someone has commented on your issue here:
qortal://APP/Q-Support/issue/${postName}/${postId}`;
@@ -156,12 +157,11 @@ export const CommentEditor = ({
    }
    try {
-     const base64 = utf8ToBase64(value);
      const resourceResponse = await qortalRequest({
        action: "PUBLISH_QDN_RESOURCE",
        name: name,
        service: "BLOG_COMMENT",
-       data64: base64,
+       file: stringToFile(value),
        identifier: identifier,
      });
@@ -180,13 +180,6 @@ export const CommentEditor = ({
      });
    }
    if (!isReply && !isEdit) {
-     // const notificationMessage = `This is an automated Q-Support notification indicating that someone has commented on your issue here:
-     // qortal://APP/Q-Support/issue/${postName}/${postId}
-     //
-     // Here are the first ${maxNotificationLength} characters of the comment:
-     //
-     // ${value.substring(0, maxNotificationLength)}`;
      await sendQchatDM(postName, notificationMessage);
    }
    return resourceResponse;
@@ -269,5 +262,3 @@ export const CommentEditor = ({
    </CommentInputContainer>
  );
 };
-const sendDMwithComment = () => {};

View File

@@ -93,8 +93,9 @@ export const StyledCardComment = styled(Typography)(({ theme }) => ({
   letterSpacing: 0,
   fontWeight: 400,
   color: theme.palette.text.primary,
-  fontSize: "19px",
+  fontSize: "100%",
   wordBreak: "break-word",
+  whiteSpace: "pre-wrap",
 }));
 export const TitleText = styled(Typography)({

View File

@@ -1,6 +1,9 @@
 import { setFeeData } from "../../../state/features/globalSlice.ts";
 import { store } from "../../../state/store.js";
-import { objectToBase64 } from "../../../utils/toBase64.ts";
+import {
+  objectToBase64,
+  objectToFile,
+} from "../../../utils/PublishFormatter.ts";
 import { useTestIdentifiers } from "../../Identifiers.ts";
 import { appName, FEE_BASE, feeAmountBase, FeeType } from "../FeeData.tsx";
@@ -50,7 +53,7 @@ export const addFeePrice = async (
  feeType: FeeType = "default",
  coinType: CoinType = "QORT"
 ) => {
-  let fees = await fetchFees();
+  const fees = await fetchFees();
   fees.push({
     time: Date.now(),
@@ -59,14 +62,13 @@
     coinType,
   });
-  const feesBase64 = await objectToBase64(fees);
   console.log("fees are: ", fees);
   await qortalRequest({
     action: "PUBLISH_QDN_RESOURCE",
     name: appName,
     identifier: FEE_BASE,
     service: feesPublishService,
-    data64: feesBase64,
+    file: objectToFile(fees),
   });
 };

View File

@@ -1,3 +1,4 @@
+/* eslint-disable */
 import { configureStore } from "@reduxjs/toolkit";
 import authReducer from "./features/authSlice.js";
 import fileReducer from "./features/fileSlice.ts";

View File

@@ -1,174 +1,191 @@
-export const toBase64 = (file: File): Promise<string | ArrayBuffer | null> =>
+export const publishFormatter = (
+  file: File
+): Promise<string | ArrayBuffer | null> =>
   new Promise((resolve, reject) => {
     const reader = new FileReader();
     reader.readAsDataURL(file);

     reader.onload = () => {
       const result = reader.result;
       reader.onload = null; // remove onload handler
       reader.onerror = null; // remove onerror handler
       resolve(result);
     };

     reader.onerror = error => {
       reader.onload = null; // remove onload handler
       reader.onerror = null; // remove onerror handler
       reject(error);
     };
   });

 export function objectToBase64(obj: any) {
   // Step 1: Convert the object to a JSON string
   const jsonString = JSON.stringify(obj);

   // Step 2: Create a Blob from the JSON string
   const blob = new Blob([jsonString], { type: "application/json" });

   // Step 3: Create a FileReader to read the Blob as a base64-encoded string
   return new Promise<string>((resolve, reject) => {
     const reader = new FileReader();
     reader.onloadend = () => {
       if (typeof reader.result === "string") {
         // Remove 'data:application/json;base64,' prefix
         const base64 = reader.result.replace(
           "data:application/json;base64,",
           ""
         );
         resolve(base64);
       } else {
         reject(new Error("Failed to read the Blob as a base64-encoded string"));
       }
     };
     reader.onerror = () => {
       reject(reader.error);
     };
     reader.readAsDataURL(blob);
   });
 }

+export const stringToFile = (text: string) => {
+  return new File([text], "", {
+    type: "text/plain",
+  });
+};
+
+export const objectToFile = (obj: object) => {
+  // Step 1: Convert the object to a JSON string
+  const jsonString = JSON.stringify(obj);
+  const fileType = { type: "application/json" };
+  // Step 2: Create a Blob from the JSON string
+  const blob = new Blob([jsonString], fileType);
+  return new File([blob], ``, fileType);
+};
+
 export function objectToUint8Array(obj: any) {
   // Convert the object to a JSON string
   const jsonString = JSON.stringify(obj);

   // Encode the JSON string as a byte array using TextEncoder
   const encoder = new TextEncoder();
   const byteArray = encoder.encode(jsonString);

   // Create a new Uint8Array and set its content to the encoded byte array
   const uint8Array = new Uint8Array(byteArray);

   return uint8Array;
 }

 export function uint8ArrayToBase64(uint8Array: Uint8Array): string {
   const length = uint8Array.length;
   let binaryString = "";
   const chunkSize = 1024 * 1024; // Process 1MB at a time

   for (let i = 0; i < length; i += chunkSize) {
     const chunkEnd = Math.min(i + chunkSize, length);
     const chunk = uint8Array.subarray(i, chunkEnd);
     binaryString += Array.from(chunk, byte => String.fromCharCode(byte)).join(
       ""
     );
   }

   return btoa(binaryString);
 }

 export function objectToUint8ArrayFromResponse(obj: any) {
   const len = Object.keys(obj).length;
   const result = new Uint8Array(len);

   for (let i = 0; i < len; i++) {
     result[i] = obj[i];
   }

   return result;
 }

 // export function uint8ArrayToBase64(arrayBuffer: Uint8Array): string {
 //   let binary = ''
 //   const bytes = new Uint8Array(arrayBuffer)
 //   const len = bytes.length

 //   for (let i = 0; i < len; i++) {
 //     binary += String.fromCharCode(bytes[i])
 //   }

 //   return btoa(binary)
 // }

 export function base64ToUint8Array(base64: string) {
   const binaryString = atob(base64);
   const len = binaryString.length;
   const bytes = new Uint8Array(len);

   for (let i = 0; i < len; i++) {
     bytes[i] = binaryString.charCodeAt(i);
   }

   return bytes;
 }

 export function uint8ArrayToObject(uint8Array: Uint8Array) {
   // Decode the byte array using TextDecoder
   const decoder = new TextDecoder();
   const jsonString = decoder.decode(uint8Array);
   // Convert the JSON string back into an object
   const obj = JSON.parse(jsonString);

   return obj;
 }

 export function processFileInChunks(file: File): Promise<Uint8Array> {
   return new Promise(
     (resolve: (value: Uint8Array) => void, reject: (reason?: any) => void) => {
       const reader = new FileReader();

       reader.onload = function (event: ProgressEvent<FileReader>) {
         const arrayBuffer = event.target?.result as ArrayBuffer;
         const uint8Array = new Uint8Array(arrayBuffer);
         resolve(uint8Array);
       };

       reader.onerror = function (error: ProgressEvent<FileReader>) {
         reject(error);
       };

       reader.readAsArrayBuffer(file);
     }
   );
 }

 // export async function processFileInChunks(file: File, chunkSize = 1024 * 1024): Promise<Uint8Array> {
 //   const fileStream = file.stream();
 //   const reader = fileStream.getReader();
 //   const totalLength = file.size;

 //   if (totalLength <= 0 || isNaN(totalLength)) {
 //     throw new Error('Invalid file size');
 //   }

 //   const combinedArray = new Uint8Array(totalLength);
 //   let offset = 0;
 //   while (offset < totalLength) {
 //     const { value, done } = await reader.read();

 //     if (done) {
 //       break;
 //     }

 //     const chunk = new Uint8Array(value.buffer, value.byteOffset, value.byteLength);

 //     // Set elements one by one instead of using combinedArray.set(chunk, offset)
 //     for (let i = 0; i < chunk.length; i++) {
 //       combinedArray[offset + i] = chunk[i];
 //     }
 //     offset += chunk.length;
 //   }

 //   return combinedArray;
 // }