Initial commit

This commit is contained in:
neil 2023-10-12 14:27:51 -04:00
commit 35eb117c4e
29 changed files with 1398 additions and 0 deletions

1
.env.example Normal file
View File

@ -0,0 +1 @@
REPLICATE_API_TOKEN=YOUR_API_TOKEN

3
.eslintrc.json Normal file
View File

@ -0,0 +1,3 @@
{
"extends": "next/core-web-vitals"
}

34
.gitignore vendored Normal file
View File

@ -0,0 +1,34 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# local env files
.env*.local
# vercel
.vercel
/image_proc_server/__pycache__

38
README.md Normal file
View File

@ -0,0 +1,38 @@
This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app).
## Getting Started
First, run the development server:
```bash
npm run dev
# or
yarn dev
# or
pnpm dev
```
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
You can start editing the page by modifying `pages/index.js`. The page auto-updates as you edit the file.
[API routes](https://nextjs.org/docs/api-routes/introduction) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.js`.
The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/api-routes/introduction) instead of React pages.
This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.
## Learn More
To learn more about Next.js, take a look at the following resources:
- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.
You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome!
## Deploy on Vercel
The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.
Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details.

View File

@ -0,0 +1,150 @@
import React, { useRef, useState } from 'react'
import { Canvas, useFrame, useThree } from '@react-three/fiber'
import styles from "../styles/studio.module.css";
import { OrbitControls } from '@react-three/drei'
import { useLoader } from '@react-three/fiber'
import { GLTFLoader } from 'three/examples/jsm/loaders/GLTFLoader'
import { useGLTF, useGLB } from "@react-three/drei";
// export function CanvasComponent({setCanvasRef}) {
// return (
// <>
// <Canvas
// gl={{ preserveDrawingBuffer: true }}
// >
// <Scene setCanvasRef={setCanvasRef}/>
// <OrbitControls dampingFactor={0.3} />
// </Canvas>
// </>
// )
// }
// function Scene({setCanvasRef}) {
// const gl = useThree((state) => state.gl)
// setCanvasRef(gl.domElement);
// return (
// <>
// <ambientLight />
// <pointLight position={[10, 10, 10]} />
// <Model position={[0, 0, 0]} />
// </>
// )
// }
// function Model(props) {
// const { nodes, materials } = useGLTF("/sneaker.glb");
// return (
// <group {...props} dispose={null}>
// <group rotation={[-Math.PI / 2, 0, 0]}>
// <group rotation={[Math.PI / 2, 0, 0]} scale={0.01}>
// <group
// position={[0, 5.13, 0]}
// rotation={[-Math.PI / 2, 0, 0]}
// scale={100}
// >
// <mesh
// geometry={nodes.Plane_Material_0.geometry}
// material={materials.Material}
// />
// </group>
// </group>
// </group>
// </group>
// );
// }
// useGLTF.preload("/sneaker.glb");
export function CanvasComponent({setCanvasRef, gltfModel}) {
return (
<>
<Canvas
gl={{ alpha:true, preserveDrawingBuffer: true }}
>
<Scene gltfModel={gltfModel} setCanvasRef={setCanvasRef}/>
<OrbitControls dampingFactor={0.3} />
</Canvas>
</>
)
}
// function Scene({ setCanvasRef, gltfModel }) {
// const gl = useThree((state) => state.gl)
// setCanvasRef(gl.domElement);
// return (
// <>
// <Model gltfModel={gltfModel} position={[0, 0, 0]} />
// </>
// )
// }
function Scene({ setCanvasRef, gltfModel }) {
const { gl, camera } = useThree();
// Set camera position closer to the object
// camera.position.set(0, 0, 5); // x, y, z
setCanvasRef(gl.domElement);
return (
<>
<ambientLight />
<pointLight position={[1, 1, 1]} />
<Light brightness={100} color={"white"} /> // highlight-line
<Model gltfModel={gltfModel} position={[0, 0, 0]} />
</>
);
}
function Model({ gltfModel, ...props }) {
console.log(gltfModel)
const { nodes, materials } = gltfModel;
console.log("model loaded")
return (
<group
scale={10}
>
<primitive object={gltfModel.scene} />
</group>
);
}
function Light({ brightness, color }) {
return (
<rectAreaLight
width={3}
height={3}
color={color}
intensity={brightness}
position={[-2, 0, 5]}
lookAt={[0, 0, 0]}
penumbra={1}
castShadow
/>
);
}
// useGLTF.preload("/sneaker.glb");

21
components/textinput.js Normal file
View File

@ -0,0 +1,21 @@
import React, { useState } from 'react';
import styles from "./textinput.module.css";
function TextInput({ onTextChange }) {
const [text, setText] = useState('');
function handleTextChange(event) {
const newText = event.target.value;
setText(newText);
onTextChange(newText);
}
return (
<div className={styles.textInputContainer}>
<textarea id="text-input" wrap="soft" type="text" value={text} onChange={handleTextChange} className={styles.textInput} placeholder='Describe your scene here...'/>
</div>
);
}
export default TextInput;

View File

@ -0,0 +1,14 @@
/* Prompt textarea: fixed-size, non-resizable, styled to match the page. */
.textInput {
  width: 380px;
  overflow: hidden;
  height: 100px;
  resize: none;
  font-size: 1rem;
  font-family: Verdana, Geneva, Tahoma, sans-serif;
  padding: 10px;
  border-radius: 10px;
}
/* Wrapper reserving the full 400px input-column width. */
.textInputContainer {
  width: 400px;
}

149
image_proc_server/app.py Normal file
View File

@ -0,0 +1,149 @@
from flask import Flask, request
from PIL import Image, ImageOps
import requests
import io
import os
import base64
from flask_cors import CORS
app = Flask(__name__)
# Allow the Next.js dev server (different origin) to call this API.
CORS(app)

# Authorization for the remove.bg API. Read from the environment so the secret
# is never committed; the old placeholder remains as a backward-compatible
# fallback for local setups that edited it in place.
removebg_api_key = os.environ.get("REMOVEBG_API_KEY", "YOUR_API_KEY")
@app.route('/get_item_mask', methods=['POST'])
def get_item_mask():
    """Build mask images for an uploaded photo using the remove.bg API.

    Expects a multipart form upload under the key ``image``. Returns JSON with
    two base64-encoded images:

    * ``ai_mask`` -- black/white JPEG mask fed to the inpainting model.
    * ``ux_mask`` -- translucent tinted PNG overlay shown in the UI preview.

    On a remove.bg failure, returns ``{'error': ...}`` (still HTTP 200 —
    callers must check for the ``error`` key).
    """
    # use remove bg to get a png image and convert it to a black and white mask image.
    print("get item mask called")
    image_file = request.files['image']
    image_bytes = image_file.read()
    image = Image.open(io.BytesIO(image_bytes))
    # Re-encode as JPEG: normalizes the upload (drops any alpha channel)
    # before forwarding to remove.bg.
    image = image.convert('RGB')
    jpeg_buffer = io.BytesIO()
    image.save(jpeg_buffer, format='JPEG')
    #print(jpeg_buffer.getvalue())
    # set API endpoint and headers
    api_url = "https://api.remove.bg/v1.0/removebg"
    headers = {"X-Api-Key": removebg_api_key}
    # send the request to remove.bg API
    response = requests.post(api_url, headers=headers, files={'image_file': jpeg_buffer.getvalue()})
    # Check if the API returned a successful response
    if response.status_code == requests.codes.ok:
        # print("response is ok")
        # print("Success:", response.status_code, response.text)
        # NOTE(review): the file writes below ('output.png', 'greenoutput.png')
        # look like debug leftovers — they run on every request and write into
        # the server's working directory. Consider removing.
        # Save the PNG image to a file
        with open('output.png', 'wb') as out_file:
            out_file.write(response.content)
        png_buffer = io.BytesIO()
        ux_mask_buffer = io.BytesIO()
        response_image = Image.open(io.BytesIO(response.content))
        response_image = response_image.convert('RGBA')
        # Black/white mask for the diffusion model (JPEG keeps the payload small).
        ai_mask_image = convert_to_black_and_white(response_image)
        ai_mask_image = ai_mask_image.convert('RGB')
        ai_mask_image.save(png_buffer, format="JPEG")
        ai_mask_image_base64 = base64.b64encode(png_buffer.getvalue()).decode('utf-8')
        # Tinted overlay for the front-end preview (PNG preserves transparency).
        ux_mask_image = convert_to_green(response_image)
        ux_mask_image = ux_mask_image.convert('RGBA')
        ux_mask_image.save(ux_mask_buffer, format="PNG")
        ux_mask_image.save('greenoutput.png')
        ux_mask_image_base64 = base64.b64encode(ux_mask_buffer.getvalue()).decode('utf-8')
        return {'ai_mask': ai_mask_image_base64, 'ux_mask': ux_mask_image_base64}
    else:
        # Print an error message if the API call was not successful
        print("Error:", response.status_code, response.text)
        return {'error': "There was a problem with the remove bg operation"}
def convert_to_black_and_white(image):
    """Convert an RGBA cut-out into a strict black/white mask.

    Opaque (subject) pixels become black and the untouched white background
    stays white — the mask convention consumed downstream (white = area the
    model may repaint).

    Args:
        image: RGBA ``PIL.Image`` whose alpha channel marks the subject.

    Returns:
        A bilevel (mode ``'1'``) ``PIL.Image`` of the same size.

    Fix: removed the per-call debug writes of 'temp_mask_image.png' and
    'temp_mask_image_grayscale.png' — they ran on every request and polluted
    the server's working directory.
    """
    # Create a new image with the same size as the original, but with a white background
    new_image = Image.new('RGB', image.size, (255, 255, 255))
    # The alpha channel is the last band of an RGBA image.
    alpha_mask = image.split()[-1]
    # Drop semi-transparent fringe pixels so the mask edge stays crisp.
    modified_alpha_mask = modify_alpha_mask(alpha_mask)
    # Paste the original image onto the new image using the alpha channel as the mask
    new_image.paste(image, mask=modified_alpha_mask)
    # Convert the new image to grayscale
    new_image = new_image.convert('L')
    # Pure white (never covered by the subject) stays white; anything the
    # subject touched becomes black.
    black_and_white_image = new_image.point(lambda x: 255 if x == 255 else 0, '1')
    return black_and_white_image
def modify_alpha_mask(alpha_mask):
    """Force faint (semi-transparent) alpha values to fully transparent.

    remove.bg produces anti-aliased edges; pixels with alpha in (0, 100) are
    zeroed so the downstream mask has a hard edge. Fully transparent (0) and
    mostly-opaque (>= 100) pixels pass through unchanged.

    Args:
        alpha_mask: single-band alpha image (converted to 'L' if needed).

    Returns:
        A new 'L'-mode image with the adjusted alpha values.
    """
    # Ensure the input image has an alpha channel
    if alpha_mask.mode != 'L':
        alpha_mask = alpha_mask.convert('L')
    # Image.point applies the same per-pixel mapping as the previous nested
    # Python getpixel/putpixel loop, but runs in C — O(n) with far less
    # overhead and identical output.
    return alpha_mask.point(lambda a: 0 if 0 < a < 100 else a)
def convert_to_green(image):
    """Build the translucent tinted overlay shown in the UI preview.

    Keeps the subject pixels from ``image`` and fills everything outside the
    subject with a semi-transparent amber tint (255, 200, 0, 180).

    Args:
        image: RGBA ``PIL.Image`` whose alpha channel marks the subject.

    Returns:
        An RGBA ``PIL.Image`` of the same size.
    """
    # Copy the subject onto a fully transparent canvas through its own alpha.
    subject_alpha = image.split()[-1]
    overlay = Image.new('RGBA', image.size, (0, 0, 0, 0))
    overlay.paste(image, mask=subject_alpha)
    # Fill the region *outside* the subject by pasting a solid tint layer
    # through the inverted alpha of the composited canvas.
    tint_layer = Image.new('RGBA', image.size, (255, 200, 0, 180))
    inverted_alpha = ImageOps.invert(overlay.split()[-1])
    overlay.paste(tint_layer, mask=inverted_alpha)
    return overlay

Binary file not shown.

After

Width:  |  Height:  |  Size: 68 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 77 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 21 KiB

7
jsconfig.json Normal file
View File

@ -0,0 +1,7 @@
{
"compilerOptions": {
"paths": {
"@/*": ["./*"]
}
}
}

29
next.config.js Normal file
View File

@ -0,0 +1,29 @@
// Next.js build/runtime configuration.
module.exports = {
  reactStrictMode: true,
  images: {
    // Allow next/image to optimize remote images served by Replicate:
    // the site itself and its prediction-output CDN.
    remotePatterns: [
      {
        protocol: "https",
        hostname: "replicate.com",
      },
      {
        protocol: "https",
        hostname: "pbxt.replicate.delivery",
      },
    ],
  },
  // Disabled CSP experiment kept for reference.
  // async headers() {
  //   return [
  //     {
  //       source: '/(.*)',
  //       headers: [
  //         {
  //           key: 'Content-Security-Policy',
  //           value: "default-src 'self'; img-src 'self' blob: https://replicate.com https://pbxt.replicate.delivery; script-src 'self' 'unsafe-inline' 'unsafe-eval'; style-src 'self' 'unsafe-inline'; worker-src 'self' blob:; connect-src 'self' blob:;"
  //         }
  //       ],
  //     },
  //   ];
  // },
};

31
package.json Normal file
View File

@ -0,0 +1,31 @@
{
"name": "3d-to-photo",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint"
},
"dependencies": {
"@react-three/drei": "^9.65.3",
"@react-three/fiber": "^8.12.0",
"@types/three": "^0.150.1",
"axios": "^1.3.4",
"buffer": "^6.0.3",
"eslint": "8.36.0",
"eslint-config-next": "13.2.4",
"fabric": "^5.3.0",
"form-data": "^4.0.0",
"jimp": "^0.22.10",
"next": "13.2.4",
"next-images": "^1.8.4",
"react": "18.2.0",
"react-dom": "18.2.0",
"react-dropzone": "^14.2.3",
"sharp": "^0.32.6",
"styled-components": "^5.3.9",
"three": "^0.151.3"
}
}

5
pages/_app.js Normal file
View File

@ -0,0 +1,5 @@
import '@/styles/globals.css'
export default function App({ Component, pageProps }) {
return <Component {...pageProps} />
}

13
pages/_document.js Normal file
View File

@ -0,0 +1,13 @@
import { Html, Head, Main, NextScript } from 'next/document'
export default function Document() {
return (
<Html lang="en">
<Head />
<body>
<Main />
<NextScript />
</body>
</Html>
)
}

5
pages/api/hello.js Normal file
View File

@ -0,0 +1,5 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
// Sample endpoint left over from create-next-app; replies 200 with a fixed
// JSON body.
export default function handler(req, res) {
  const payload = { name: 'John Doe' };
  res.status(200).json(payload);
}

View File

@ -0,0 +1,20 @@
// GET /api/predictions/[id] — proxy a single prediction-status lookup to
// Replicate so the API token stays server-side. On a non-200 upstream reply,
// respond 500 with the upstream error detail; otherwise forward the
// prediction JSON unchanged.
export default async function handler(req, res) {
  const url = "https://api.replicate.com/v1/predictions/" + req.query.id;
  const response = await fetch(url, {
    headers: {
      Authorization: `Token ${process.env.REPLICATE_API_TOKEN}`,
      "Content-Type": "application/json",
    },
  });
  const payload = await response.json();
  if (response.status !== 200) {
    res.statusCode = 500;
    res.end(JSON.stringify({ detail: payload.detail }));
    return;
  }
  res.end(JSON.stringify(payload));
}

View File

@ -0,0 +1,33 @@
// POST /api/predictions — create a Replicate prediction (SD 1.5 inpainting).
// Proxied server-side so REPLICATE_API_TOKEN never reaches the browser.
// The request body (prompt, image, mask, etc.) is forwarded verbatim as the
// model input.
// SECURITY FIX: removed the console.log that printed the full API token into
// the server logs on every request.
export default async function handler(req, res) {
  const response = await fetch("https://api.replicate.com/v1/predictions", {
    method: "POST",
    headers: {
      Authorization: `Token ${process.env.REPLICATE_API_TOKEN}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      // Pinned to a specific version of Stable Diffusion
      // See https://replicate.com/stability-ai/stable-diffussion/versions
      // inpainting
      //model: "andreasjansson/stable-diffusion-inpainting",
      version: "e490d072a34a94a11e9711ed5a6ba621c3fab884eda1665d9d3a282d65a21180", // SD 1.5 inpainting
      //version: "f9bb0632bfdceb83196e85521b9b55895f8ff3d1d3b487fd1973210c0eb30bec", // SD v2 inpainting
      // This is the text prompt that will be submitted by a form on the frontend
      input: req.body,
    }),
  });
  // Replicate replies 201 Created when the prediction is queued.
  if (response.status !== 201) {
    let error = await response.json();
    res.statusCode = 500;
    res.end(JSON.stringify({ detail: error.detail }));
    return;
  }
  const prediction = await response.json();
  res.statusCode = 201;
  res.end(JSON.stringify(prediction));
}

520
pages/index.js Normal file
View File

@ -0,0 +1,520 @@
import { useState, useEffect, useRef } from "react";
import Head from "next/head";
import Image from "next/image";
import styles from "../styles/studio.module.css";
import FormData from 'form-data';
import Dropzone from 'react-dropzone';
import { userAgentFromString } from "next/server";
import TextInput from "@/components/textinput";
import { Canvas, useFrame, useThree } from '@react-three/fiber'
import { GLTFLoader } from 'three/examples/jsm/loaders/GLTFLoader';
import {CanvasComponent} from "@/components/canvascomponent";
/*
links for thereejs implementation
https://codesandbox.io/s/basic-demo-forked-rnuve?file=/src/App.js
*/
// Pause helper used while polling the prediction-status endpoint.
const sleep = (ms) => new Promise((r) => setTimeout(r, ms));

// Studio page: drop a GLB model onto the canvas, describe a scene, then
// generate AI product shots. Pipeline: snapshot the 3D canvas -> local Flask
// server builds masks via remove.bg -> Replicate SD inpainting fills the
// background -> four result images are displayed.
export default function Studio(){
  // const imageContainer = {
  //   position: 'absolute',
  //   top: '0',
  //   left: '0',
  //   backgroundColor: 'rgb(70, 232, 83)',
  //   height: '100%',
  //   width: '100%',
  // };
  // --- snapshot / mask / model state ---
  const [canvasSnapshotUrl, setCanvasSnapshotUrl] = useState(null)
  const [maskImageUrl, setMaskImageUrl] = useState(null);
  const [uxMaskImageUrl, setUxMaskImageUrl] = useState(null);
  const [imageFile, setImageFile] = useState(null)
  // NOTE(review): modelFile/modelBuffer are never written or read — likely dead.
  const [modelFile, setModelFile] = useState(null);
  const [modelBuffer, setModelBuffer] = useState(null);
  const [gltfModel, setGltfModel] = useState(null);
  // --- layer-visibility flags ---
  const [isImgUploadVisible, setIsImgUploadVisible] = useState(null)
  const [isMaskVisible, setIsMaskVisible] = useState(null)
  const [isResultVisible, setIsResultVisible] = useState(null)
  const [isFlashingProgressVisible, setIsFlashingProgressVisible] = useState(null)
  // --- Replicate prediction state / prompt text ---
  const [prediction, setPrediction] = useState(null);
  const [error, setError] = useState(null);
  const [inputValue, setInputValue] = useState('');
  // create a canvas reference in the main state
  const [canvasRef, setCanvasRef] = useState(null);
  // Receives the prompt text from the TextInput child on every keystroke.
  function handleInputValueChange(newInputValue) {
    setInputValue(newInputValue);
  }
  // Initialize layer visibility once on mount.
  useEffect(() => {
    console.log("loaded the page");
    // define visibiltiy of the 3 layers
    setIsImgUploadVisible(true)
    setIsMaskVisible(true)
    setIsResultVisible(false)
    setIsFlashingProgressVisible(false)
  },[]);
  // Drag-and-drop handler: parse a dropped .glb file into a GLTF object.
  // Non-.glb drops are silently ignored.
  const handleDrop = (event) => {
    event.preventDefault();
    const file = event.dataTransfer.files[0];
    if (file && file.name.endsWith('.glb')) {
      const reader = new FileReader();
      reader.readAsArrayBuffer(file);
      reader.onload = (event) => {
        const arrayBuffer = event.target.result;
        const loader = new GLTFLoader();
        loader.parse(arrayBuffer, '', (gltf) => {
          setGltfModel(gltf);
          console.log("gltf model loaded")
          console.log('Loaded Scene:', gltf.scene);
        }, (error) => {
          console.error('ArrayBuffer loading error:', error);
        });
      };
    }
  };
  // Kick off a Replicate inpainting prediction (image + mask are base64 data
  // URLs), then poll /api/predictions/[id] once a second until it succeeds or
  // fails, surfacing results/errors through component state.
  const getReplicateResults = async (image, mask) => {
    setIsFlashingProgressVisible(true)
    // Fall back to a default prompt when the textarea is empty.
    let promptText = "beautiful living room"
    if (inputValue) {
      promptText = inputValue
    }
    const response = await fetch("/api/predictions", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
      },
      body: JSON.stringify({
        //prompt: e.target.prompt.value,
        //prompt: "high resolution photography of a beige interior living room with dining chairs, around dining table, wooden floor, beige blue salmon pastel, sunlight, contrast, realistic artstation concept art, hyperdetailed, ultradetail, cinematic 8k, architectural rendering, unreal engine 5, rtx, volumetric light, cozy atmosphere",
        //prompt: "minimalist kitchen, wooden floor, beige blue salmon pastel, sunlight, contrast, realistic artstation concept art, hyperdetailed, ultradetail, cinematic 8k, architectural rendering, unreal engine 5, rtx, volumetric light, cozy atmosphere",
        prompt: promptText + ", creative marketing advertisement",
        negative_prompt: "blurry, painting, cartoon, abstract, ugly, deformed",
        image: image,
        mask: mask,
        num_outputs: 4,
        guidance_scale: 7.5,
      }),
    });
    let prediction = await response.json();
    if (response.status !== 201) {
      setError(prediction.detail);
      return;
    }
    setPrediction(prediction);
    // Poll until the prediction reaches a terminal state.
    while (
      prediction.status !== "succeeded" &&
      prediction.status !== "failed"
    ) {
      await sleep(1000);
      const response = await fetch("/api/predictions/" + prediction.id);
      prediction = await response.json();
      if (response.status !== 200) {
        setError(prediction.detail);
        return;
      }
      if (prediction.status == "succeeded" && prediction.output) {
        setIsImgUploadVisible(true)
        setIsMaskVisible(true)
        setIsResultVisible(true)
        setIsFlashingProgressVisible(false)
      }
      console.log({prediction})
      setPrediction(prediction);
    }
  };
  // Full generation flow: snapshot the canvas, request masks from the local
  // Flask server, then hand snapshot + mask to getReplicateResults.
  // NOTE(review): `FormData` here is the imported `form-data` package, not the
  // browser-native FormData — verify it behaves correctly with browser fetch.
  async function generateImages() {
    console.log("Called the generate images function")
    // Do something with the image data URL
    let snapshotImage = capture3DSnapshot()
    const formData = new FormData();
    formData.append('image', snapshotImage);
    if (!snapshotImage) {
      console.log("image file is null")
    }
    // Generate base64 url image for remove bg
    try {
      // Local mask server — must be running for generation to work.
      const response = await fetch('http://127.0.0.1:5000/get_item_mask', {
        method: 'POST',
        body: formData
      });
      // Handle response
      // const imageBlob = await response.blob();
      // const url = URL.createObjectURL(imageBlob);
      // setMaskImageUrl(url)
      console.log(response)
      const data = await response.json();
      //console.log(data.image)
      let maskBase64Url = `data:image/jpeg;base64,${data.ai_mask}`
      let uxMaskBase64Url = `data:image/jpeg;base64,${data.ux_mask}`
      setMaskImageUrl(maskBase64Url)
      setUxMaskImageUrl(uxMaskBase64Url)
      setIsMaskVisible(true)
      setIsImgUploadVisible(true)
      setIsResultVisible(false)
      // NOTE(review): unused placeholder URL — likely leftover test data.
      let imageURLTemp = "https://images.unsplash.com/photo-1490730141103-6cac27aaab94?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=2940&q=80"
      // Generate base64 image for input image.
      // Filereader converts a file blob into a base64 string
      const reader = new FileReader();
      reader.readAsDataURL(snapshotImage);
      reader.onload = async () => {
        const imageBase64Url = reader.result;
        // now send a request to replicate
        await getReplicateResults(imageBase64Url ,maskBase64Url)
      };
    } catch (error) {
      console.error(error);
    }
    // const img = document.createElement('img');
    // img.src = url;
    // document.body.appendChild(img);
  }
  // Decode a data-URL image into a Blob (used to upload the canvas snapshot).
  function base64ToBlob(base64Image) {
    const parts = base64Image.split(';base64,');
    const mimeType = parts[0].split(':')[1];
    const byteString = atob(parts[1]);
    const arrayBuffer = new ArrayBuffer(byteString.length);
    const uint8Array = new Uint8Array(arrayBuffer);
    for (let i = 0; i < byteString.length; i++) {
      uint8Array[i] = byteString.charCodeAt(i);
    }
    return new Blob([arrayBuffer], { type: mimeType });
  }
  // Capture the WebGL canvas as a PNG blob (requires preserveDrawingBuffer on
  // the Canvas) and stash both the data URL and the blob in state.
  function capture3DSnapshot() {
    const dataUrl = canvasRef.toDataURL("image/png")
    setCanvasSnapshotUrl(dataUrl)
    const blob = base64ToBlob(dataUrl);
    setImageFile(blob)
    setIsMaskVisible(false)
    setIsImgUploadVisible(true)
    setIsResultVisible(false)
    return blob
  }
  //download snapshot
  // Offer the current canvas snapshot as a file download (currently unused).
  const download3DSnapshot = () => {
    const link = document.createElement("a");
    link.setAttribute("download", "canvas.png");
    link.setAttribute(
      "href",
      canvasRef.toDataURL("image/png").replace("image/png", "image/octet-stream")
    );
    link.click();
  };
  // Layout: left input panel (3D canvas + prompt + button), right results
  // panel (2x2 grid of generated images). Large commented regions below are
  // earlier UI iterations kept for reference.
  return(
    <div className={styles.page}>
      {/* threejs container */}
      <div className={styles.inputPanel}>
        {/* <div className={styles.mainImageContainer}>
          {isImgUploadVisible? (
            <div
              className={styles.imageContainer}
              onDrop={handleDrop}
              onDragOver={(event) => event.preventDefault()}
            >
              {imageFile? (
                <div>
                  <Image
                    src={URL.createObjectURL(imageFile)}
                    alt="Uploaded image"
                    fill={true}
                  />
                </div>
              ) : (
                <div className={styles.dragAndDropText}>Drag and Drop your image here</div>
              )}
            </div>
          ): (<></>)
          }
          <CanvasComponent setCanvasRef={setCanvasRef} />
        </div> */}
        <div className={styles.mainImageContainer}>
          <div
            className={styles.imageContainer}
            onDrop={handleDrop}
            onDragOver={(event) => event.preventDefault()}
          >
            {gltfModel ? <CanvasComponent setCanvasRef={setCanvasRef} gltfModel={gltfModel} /> : "Drop your GLB model here"}
          </div>
        </div>
        <div><TextInput onTextChange={handleInputValueChange} /></div>
        <div className={styles.buttonContainer}>
          {/* <div
            className={styles.startNewButton}
            onClick={()=>setImageFile(null)}
          >
            Start New
          </div> */}
          <div
            className={styles.generateButton}
            onClick={()=>generateImages()}
          >
            Generate Images
          </div>
        </div>
      </div>
      <div className={styles.resultsPanel}>
        {/* {isImgUploadVisible? (
          <div
            className={styles.imageContainer}
            onDrop={handleDrop}
            onDragOver={(event) => event.preventDefault()}
          >
            {imageFile? (
              <div>
                <Image
                  src={URL.createObjectURL(imageFile)}
                  alt="Uploaded image"
                  fill={true}
                />
              </div>
            ) : (
              <div className={styles.dragAndDropText}>Drag and Drop your image here. Or click 3D</div>
            )}
            {canvasSnapshotUrl? (
              <div>
                <Image
                  src={canvasSnapshotUrl}
                  alt="Uploaded image"
                  fill={true}
                />
              </div>
            ) : (
              <></>
            )
            }
          </div>
        ): (<></>)
        } */}
        {/* {canvasSnapshotUrl? (
          <div
            className={`${styles.maskImageContainer} ${canvasSnapshotUrl ? styles.slideDown : ""}`}
          >
            {canvasSnapshotUrl? (
              <Image
                src={canvasSnapshotUrl? canvasSnapshotUrl : ""}
                alt="output"
                fill="true"
                hidden={!canvasSnapshotUrl}
              />
            ):(
              <div> </div>
            )
            }
          </div>
        ) : (
          <></>
        )} */}
        {/* <div
          className={`${styles.maskImageContainer} ${isMaskVisible ? styles.slideDown : ""}`}
        >
          {uxMaskImageUrl? (
            <Image
              src={uxMaskImageUrl? uxMaskImageUrl : ""}
              alt="output"
              width="400"
              height="400"
              objectFit="contain"
              hidden={!uxMaskImageUrl}
            />
          ):(
            <div> </div>
          )
          }
        </div> */}
        {/* {isFlashingProgressVisible? (
          <div
            className={styles.flashingProgressContainer}
          >
          </div>
        ):(<></>)} */}
        {isResultVisible? (
          <>
            <div>
              {prediction.output && (
                <div className={styles.imageResultsContainer}>
                  <div className={styles.imageWrapper}>
                    <Image
                      fill
                      src={prediction.output[0]}
                      alt="output"
                    />
                  </div>
                  <div className={styles.imageWrapper}>
                    <Image
                      fill
                      src={prediction.output[1]}
                      alt="output"
                    />
                  </div>
                  <div className={styles.imageWrapper}>
                    <Image
                      fill
                      src={prediction.output[2]}
                      alt="output"
                    />
                  </div>
                  <div className={styles.imageWrapper}>
                    <Image
                      fill
                      src={prediction.output[3]}
                      alt="output"
                    />
                  </div>
                </div>
              )}
            </div>
          </>
        ) : (
          <></>
        )}
        {
          isFlashingProgressVisible?
          (<p>Loading...</p>):(<></>)
        }
      </div>
      {/* <div className={styles.contentPanel}>
        {error && <div>{error}</div>}
        {prediction && (
          <div>
            {prediction.output && (
              <div className={styles.imageResultsContainer}>
                <div className={styles.imageWrapper}>
                  <Image
                    fill
                    src={prediction.output[0]}
                    alt="output"
                  />
                </div>
                <div className={styles.imageWrapper}>
                  <Image
                    fill
                    src={prediction.output[1]}
                    alt="output"
                  />
                </div>
                <div className={styles.imageWrapper}>
                  <Image
                    fill
                    src={prediction.output[2]}
                    alt="output"
                  />
                </div>
                <div className={styles.imageWrapper}>
                  <Image
                    fill
                    src={prediction.output[3]}
                    alt="output"
                  />
                </div>
              </div>
            )}
            <p>status: {prediction.status}</p>
          </div>
        )}
      </div> */}
    </div>
  );
}

BIN
public/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

1
public/next.svg Normal file
View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg>

After

Width:  |  Height:  |  Size: 1.3 KiB

BIN
public/sneaker.glb Normal file

Binary file not shown.

1
public/thirteen.svg Normal file
View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="40" height="31" fill="none"><g opacity=".9"><path fill="url(#a)" d="M13 .4v29.3H7V6.3h-.2L0 10.5V5L7.2.4H13Z"/><path fill="url(#b)" d="M28.8 30.1c-2.2 0-4-.3-5.7-1-1.7-.8-3-1.8-4-3.1a7.7 7.7 0 0 1-1.4-4.6h6.2c0 .8.3 1.4.7 2 .4.5 1 .9 1.7 1.2.7.3 1.6.4 2.5.4 1 0 1.7-.2 2.5-.5.7-.3 1.3-.8 1.7-1.4.4-.6.6-1.2.6-2s-.2-1.5-.7-2.1c-.4-.6-1-1-1.8-1.4-.8-.4-1.8-.5-2.9-.5h-2.7v-4.6h2.7a6 6 0 0 0 2.5-.5 4 4 0 0 0 1.7-1.3c.4-.6.6-1.3.6-2a3.5 3.5 0 0 0-2-3.3 5.6 5.6 0 0 0-4.5 0 4 4 0 0 0-1.7 1.2c-.4.6-.6 1.2-.6 2h-6c0-1.7.6-3.2 1.5-4.5 1-1.3 2.2-2.3 3.8-3C25 .4 26.8 0 28.8 0s3.8.4 5.3 1.1c1.5.7 2.7 1.7 3.6 3a7.2 7.2 0 0 1 1.2 4.2c0 1.6-.5 3-1.5 4a7 7 0 0 1-4 2.2v.2c2.2.3 3.8 1 5 2.2a6.4 6.4 0 0 1 1.6 4.6c0 1.7-.5 3.1-1.4 4.4a9.7 9.7 0 0 1-4 3.1c-1.7.8-3.7 1.1-5.8 1.1Z"/></g><defs><linearGradient id="a" x1="20" x2="20" y1="0" y2="30.1" gradientUnits="userSpaceOnUse"><stop/><stop offset="1" stop-color="#3D3D3D"/></linearGradient><linearGradient id="b" x1="20" x2="20" y1="0" y2="30.1" gradientUnits="userSpaceOnUse"><stop/><stop offset="1" stop-color="#3D3D3D"/></linearGradient></defs></svg>

After

Width:  |  Height:  |  Size: 1.1 KiB

1
public/vercel.svg Normal file
View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 283 64"><path fill="black" d="M141 16c-11 0-19 7-19 18s9 18 20 18c7 0 13-3 16-7l-7-5c-2 3-6 4-9 4-5 0-9-3-10-7h28v-3c0-11-8-18-19-18zm-9 15c1-4 4-7 9-7s8 3 9 7h-18zm117-15c-11 0-19 7-19 18s9 18 20 18c6 0 12-3 16-7l-8-5c-2 3-5 4-8 4-5 0-9-3-11-7h28l1-3c0-11-8-18-19-18zm-10 15c2-4 5-7 10-7s8 3 9 7h-19zm-39 3c0 6 4 10 10 10 4 0 7-2 9-5l8 5c-3 5-9 8-17 8-11 0-19-7-19-18s8-18 19-18c8 0 14 3 17 8l-8 5c-2-3-5-5-9-5-6 0-10 4-10 10zm83-29v46h-9V5h9zM37 0l37 64H0L37 0zm92 5-27 48L74 5h10l18 30 17-30h10zm59 12v10l-3-1c-6 0-10 4-10 10v15h-9V17h9v9c0-5 6-9 13-9z"/></svg>

After

Width:  |  Height:  |  Size: 629 B

35
styles/Home.module.css Normal file
View File

@ -0,0 +1,35 @@
/* Centered single-column page container (create-next-app starter styles). */
.container {
  padding: 2rem;
  font-size: 1.3rem;
  max-width: 48rem;
  margin: 0 auto;
}
/* Prompt form: input and button side by side. */
.form {
  display: flex;
  margin-bottom: 2rem;
}
.form input {
  width: 100%;
  padding: 1rem;
  border: 1px solid #000;
  border-radius: 0.25rem;
  font-size: 1.3rem;
  margin-right: 1rem;
}
.form button {
  padding: 1rem;
  border: none;
  border-radius: 0.25rem;
  box-sizing: border-box;
  cursor: pointer;
  font-size: 1.3rem;
}
/* Square positioning context for next/image `fill` outputs. */
.imageWrapper {
  width: 100%;
  aspect-ratio: 1 / 1;
  position: relative
}

111
styles/globals.css Normal file
View File

@ -0,0 +1,111 @@
/* Global base style; the create-next-app theme below remains commented out. */
body {
  font-family: sans-serif;
}
/* :root {
--max-width: 1100px;
--border-radius: 12px;
--font-mono: ui-monospace, Menlo, Monaco, 'Cascadia Mono', 'Segoe UI Mono',
'Roboto Mono', 'Oxygen Mono', 'Ubuntu Monospace', 'Source Code Pro',
'Fira Mono', 'Droid Sans Mono', 'Courier New', monospace;
--foreground-rgb: 0, 0, 0;
--background-start-rgb: 214, 219, 220;
--background-end-rgb: 255, 255, 255;
--primary-glow: conic-gradient(
from 180deg at 50% 50%,
#16abff33 0deg,
#0885ff33 55deg,
#54d6ff33 120deg,
#0071ff33 160deg,
transparent 360deg
);
--secondary-glow: radial-gradient(
rgba(255, 255, 255, 1),
rgba(255, 255, 255, 0)
);
--tile-start-rgb: 239, 245, 249;
--tile-end-rgb: 228, 232, 233;
--tile-border: conic-gradient(
#00000080,
#00000040,
#00000030,
#00000020,
#00000010,
#00000010,
#00000080
);
--callout-rgb: 238, 240, 241;
--callout-border-rgb: 172, 175, 176;
--card-rgb: 180, 185, 188;
--card-border-rgb: 131, 134, 135;
}
@media (prefers-color-scheme: dark) {
:root {
--foreground-rgb: 255, 255, 255;
--background-start-rgb: 0, 0, 0;
--background-end-rgb: 0, 0, 0;
--primary-glow: radial-gradient(rgba(1, 65, 255, 0.4), rgba(1, 65, 255, 0));
--secondary-glow: linear-gradient(
to bottom right,
rgba(1, 65, 255, 0),
rgba(1, 65, 255, 0),
rgba(1, 65, 255, 0.3)
);
--tile-start-rgb: 2, 13, 46;
--tile-end-rgb: 2, 5, 19;
--tile-border: conic-gradient(
#ffffff80,
#ffffff40,
#ffffff30,
#ffffff20,
#ffffff10,
#ffffff10,
#ffffff80
);
--callout-rgb: 20, 20, 20;
--callout-border-rgb: 108, 108, 108;
--card-rgb: 100, 100, 100;
--card-border-rgb: 200, 200, 200;
}
}
* {
box-sizing: border-box;
padding: 0;
margin: 0;
}
html,
body {
max-width: 100vw;
overflow-x: hidden;
}
body {
color: rgb(var(--foreground-rgb));
background: linear-gradient(
to bottom,
transparent,
rgb(var(--background-end-rgb))
)
rgb(var(--background-start-rgb));
}
a {
color: inherit;
text-decoration: none;
}
@media (prefers-color-scheme: dark) {
html {
color-scheme: dark;
}
} */

176
styles/studio.module.css Normal file
View File

@ -0,0 +1,176 @@
/* Page shell: input panel (left) and results panel (right), centered. */
.page {
  background-color: rgb(255, 255, 255);
  padding: 50px;
  display: flex;
  flex-direction: row;
  justify-content: center;
}
/* Left column: 3D canvas, prompt input, and buttons stacked vertically. */
.inputPanel{
  background-color: rgb(255, 255, 255);
  justify-content: flex-start;
  align-items: center;
  padding: 10px;
  display: flex;
  flex-direction: column;
  box-sizing: border-box;
  flex: 1;
}
/* Right column: twice the width of the input panel. */
.resultsPanel {
  flex: 2;
}
.contentPanel{
  background-color: rgb(169, 169, 169);
  width: 70%;
  height: 100vh;
  margin-top: 10px;
}
/* 400x400 positioning context for the stacked canvas/overlay layers. */
.mainImageContainer{
  position: relative;
  height: 400px;
  width: 400px;
  margin-bottom: 10px;
}
/* Base layer: drop target for the GLB model / canvas host. */
.imageContainer{
  position: absolute;
  top: 0;
  left: 0;
  background-color: rgb(215, 215, 215);
  height: 100%;
  width: 100%;
  text-align: center;
  display: flex;
  justify-content: center;
  align-items: center;
  font-size: 1rem;
  font-family: Verdana, Geneva, Tahoma, sans-serif;
  color: grey;
}
.dragAndDropText{
  max-width: 50%;
  line-height: 1.6rem;
}
/* Mask overlay: slides open via the .slideDown height transition. */
.maskImageContainer{
  position: absolute;
  top: 0;
  left: 0;
  background-color: rgba(59, 32, 113, 0);
  height: 0;
  width: 100%;
  text-align: center;
  overflow: hidden;
  transition: height 0.2s ease-in;
  z-index: 10;
}
/* Full-size pulsing overlay shown while a prediction is running. */
.flashingProgressContainer{
  position: absolute;
  top: 0;
  left: 0;
  background-color: rgba(59, 32, 113, 0);
  height: 100%;
  width: 100%;
  overflow: hidden;
  z-index: 15;
  animation: flash 1s infinite;
}
/* Transparent -> translucent white -> transparent pulse. */
@keyframes flash {
  0% {
    background-color: transparent;
  }
  50% {
    background-color: rgba(255, 255, 255, 0.3);
  }
  100% {
    background-color: transparent;
  }
}
.resultsImageContainer{
  position: absolute;
  top: 0;
  left: 0;
  background-color: rgba(59, 32, 113, 0);
  height: 100%;
  width: 100%;
  text-align: center;
  overflow: hidden;
  z-index: 20;
}
/* Expanded state for .maskImageContainer. */
.slideDown {
  height: 400px;
}
.maskDisplayContainer{
  position: absolute;
  top: 0;
  left: 0;
  background-color: rgb(70, 232, 83);
  height: 100%;
  width: 100%;
  z-index: 10;
}
/* Square cell for one generated image (next/image fill). */
.imageWrapper {
  background-color: rgb(188, 216, 207);
  width: 45%;
  aspect-ratio: 1 / 1;
  position: relative;
  margin: 10px;
}
/* 2x2 wrap grid of result cells. */
.imageResultsContainer {
  display: flex;
  flex-wrap: wrap;
}
.generateButton{
  width: 40%;
  height: 50px;
  border-radius: 10px;
  margin-top: 10px;
  background-color: rgb(255, 200, 0);
  display: flex;
  justify-content: center;
  align-items: center;
  cursor:pointer;
  font-size: 1rem;
  font-family: Verdana, Geneva, Tahoma, sans-serif;
  padding-left: 5px;
  padding-right: 5px;
}
.startNewButton{
  width: 40%;
  height: 50px;
  border-radius: 10px;
  margin-top: 10px;
  background-color: rgb(215, 215, 215);
  display: flex;
  justify-content: center;
  align-items: center;
  cursor:pointer;
  font-size: 1rem;
  font-family: Verdana, Geneva, Tahoma, sans-serif;
  padding-left: 5px;
  padding-right: 5px;
}
.buttonContainer {
  position: relative;
  width: 400px;
  display: flex;
  justify-content: space-between;
}