pip install supabase python -c "import supabase; print('supabase ok')" clear deactivate clear which python which pip python -c "import sys; print(sys.executable)" /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend/.venv/bin/python -m pip install --upgrade pip /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend/.venv/bin/python -m pip install supabase /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend/.venv/bin/python -c "import supabase; print('supabase ok', supabase.__version__)" clear /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend/.venv/bin/python -m pip install statsmodels /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend/.venv/bin/python -c "import statsmodels.api as sm; print('statsmodels ok')" /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend/.venv/bin/python -m pip install scikit-learn /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend/.venv/bin/python -m pip install pandas /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend/.venv/bin/python -m pip install -r /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend/requirements.txt /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend/.venv/bin/python -c "import statsmodels.api as sm; print('statsmodels OK')" pip install python-multipart /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend/.venv/bin/python -m pip install python-multipart cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 exit curl -4 ifconfig.me curl http://127.0.0.1:9002/health ping ai-tutor-v2.instituteofanalytics.com sudo apt update sudo apt install -y nginx sudo ufw allow 80 sudo ufw allow 443 sudo ufw reload sudo ufw status cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 
clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear continue_prompt = f""" Continue the SAME lesson in detail. Already written pages: {i} Module: {module_label} Level: {learner_level} Write Page {i+1} in markdown - be thorough and detailed. Do NOT repeat previous pages. REQUIREMENTS FOR REMAINING CONTENT: - Each topic in section 3 must have: intuition (2-3 paragraphs), definition, health example with numbers, 6-15 lines Python code, pitfalls, practice questions - Section 4: 3-5 coding tasks with starter code - Section 5: Realistic project scenario with 5-10 column dataset - clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 exit nvidia-smi -L nvidia-smi --query-gpu=name,memory.total,driver_version --format=csv nvidia-smi cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 clear 
uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear pip install xgboost clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 CLEAR clear CLEAR clear uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 exit cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear pip install -r requirements.txt clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 
9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear exit cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 exit cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 exit cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 
--port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear ollama --version ollama list ollama pull llama3.1:8b-instruct cd /home/sandhiya/dr-analytics ollama pull llama3.1:8b-instruct python3 -m venv .venv source .venv/bin/activate pip install --upgrade pip nano requirements.txt pip install -r requirements.txt clear mkdir -p src outputs touch src/run.py src/pdf_text.py src/llm_ollama.py src/schemas.py src/dedupe.py config.yaml clear ollama --version ollama list ollama pull llama3.1:8b-instruct mkdir -p data/pdfs clear python src/run.py clear python src/run.py clear python src/run.py clear python src/run.py clear python src/run.py wc -c outputs/DL0111_RAW.txt wc -c outputs/HD0225_RAW.txt head -n 30 outputs/DL0111_RAW.txt head -n 30 outputs/HD0225_RAW.txt clear rm -f outputs/DL0111_RAW.txt outputs/HD0225_RAW.txt rm -f outputs/DL0111.json outputs/HD0225.json python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py clear python src/run.py clear python src/run.py clear python src/run.py mkdir dr-transcripts cp -r dr-analytics/* dr-transcripts/ ls dr-transcripts cd /home/sandhiya/dr-transcripts ollama ps nvidia-smi cd /home/sandhiya/DR-Transcripts-Claude brew install ollama python3 -m venv .venv source .venv/bin/activate brew install ollama curl -fsSL https://ollama.com/install.sh | sh ollama --version sudo systemctl enable --now ollama ollama pull llama3.2:3b ollama run llama3.2:3b ollama --version ollama list sudo systemctl restart ollama sudo systemctl status ollama --no-pager ollama run llama3.2:3b clear pip install PyPDF2 pandas openpyxl requests numpy clear ollama pull llama3.1:8b ollama pull gemma:7b python3 test_system.py mkdir -p pdf_uploads outputs clear python3 test_system.py cd /home/sandhiya/DR-Transcripts-Claude ollama serve cd ~ ls mkdir 
dr-transcripts-updated ls cp -a dr-transcripts/. dr-transcripts-updated/ ls dr-transcripts-updated cd /home/sandhiya/dr-transcripts-updated clear rm -f outputs/*.json rm -f outputs/*_RAW.txt source .venv/bin/activate python src/run.py cd /home/sandhiya/dr-transcripts python3 -m venv .venv source .venv/bin/activate rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py ls pip install -r requirements.txt clear ls[B python src/run.py clear python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt pip install -r requirements.txt clear python src/run.py ls -lt outputs | head clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py clear python src/run.py clear cd /home/sandhiya/dr-transcripts-updated source .venv/bin/activate rm outputs/*.json rm outputs/*_RAW.txt rm outputs/*_PRESENCE_RAW.txt python src/run.py clear rm outputs/*.json rm outputs/*_RAW.txt python src/run.py mkdir dr-transcripts-reasons cp -a dr-transcripts-updated/. 
dr-transcripts-reasons/ ls ls dr-transcripts-resons ls dr-transcripts-reasons cd /home/sandhiya/dr-transcripts-reasons cd /home/sandhiya/dr-transcripts-reasons-deductive source .venv/bin/activate rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py clear python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py python heatmap.py cd /home/sandhiya/dr-transcripts-reasons-deductive/src python heatmap.py pip install matplotlib seaborn clear python heatmap.py clear python heatmap.py mkdir dr-transcripts-inductive ls cp -r dr-transcripts-reasons-deductive/* dr-transcripts-inductive/ ls cd /home/sandhiya/dr-transcripts-inductive ls source .venv/bin/activate python3 -m venv .venv source .venv/bin/activate clear python run_inductive.py pip install -r requirements.txt clear python run_inductive.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python run_inductive.py clear python run_inductive.py clear python run_inductive.py clear python run_inductive.py clear ollama pull llama3.3:latest python run_inductive.py exit df -hT mount | head ls -la ~/.local/share/Trash/files/ find . 
-type d -name ".git" 2>/dev/null df -hT ~ exit mkdir dr-transcripts-inductive-Hybrid ls cp -r dr-transcripts-inductive/* dr-transcripts-inductive-Hybrid/ cd /home/sandhiya/dr-transcripts-inductive-Hybrid python3 -m venv .venv source .venv/bin/activate pip install -r requirements.txt pip install sentence-transformers hdbscan pip install scikit-learn clear python inductive_analysis-hybrid.py cd /home/sandhiya/dr-transcripts-inductive-Hybrid/src python inductive_analysis-hybrid.py cd /home/sandhiya/dr-transcripts-reasons-deductive source .venv/bin/activate python heatmap.py cd /home/sandhiya/dr-transcripts-reasons-deductive/src python heatmap.py mkdir dr-transcripts-inductive cp -r dr-transcripts-inductive-Hybrid/* dr-transcripts-inductive/ ls cd /home/sandhiya/dr-transcripts-inductive-Hybrid/src source .venv/bin/activate python3 -m venv .venv source .venv/bin/activate python inductive_analysis-hybrid.py pip install -r requirements.txt cd.. cd /home/sandhiya/dr-transcripts-inductive-Hybrid pip install -r requirements.txt clear python inductive_analysis-hybrid.py cd /home/sandhiya/dr-transcripts-inductive-Hybrid/src python inductive_analysis-hybrid.py pip install -r requirements.txt cd /home/sandhiya/dr-transcripts-inductive-Hybrid python3 -m venv .venv source .venv/bin/activate cd /home/sandhiya/dr-transcripts-inductive-Hybrid python3 -m venv .venv source .venv/bin/activate python Heatmap_hybrid_inductive.py cd /home/sandhiya/dr-transcripts-inductive-Hybrid/src python Heatmap_hybrid_inductive.py clear python inductive_analysis-hybrid.py clear python inductive_analysis-hybrid.py clear python inductive_analysis-hybrid.py ollama list ollama run llama3.3 ollama serve cd /home/sandhiya/dr-transcripts-reasons-deductive/src python3 -m venv .venv source .venv/bin/activate rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py cd /home/sandhiya/dr-transcripts-reasons-deductive\ cd /home/sandhiya/dr-transcripts-reasons-deductive clear rm -f outputs/*.json rm -f 
outputs/*_RAW.txt python src/run.py pip install -r requirements.txt python src/run.py clear python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py cd /home/sandhiya/dr-transcripts-reasons-deductive rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py python3 -m venv .venv source .venv/bin/activate pip install -r requirements.txt python src/run.py clear python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py clear python src/run.py cd /home/sandhiya/dr-transcripts-reasons-deductive python3 -m venv .venv source .venv/bin/activate rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py cd /home/sandhiya/dr-transcripts-reasons-deductive python3 -m venv .venv source .venv/bin/activate rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py cd /home/sandhiya/dr-transcripts-reasons-deductive python3 -m venv .venv source .venv/bin/activate ollama ps ollama list watch -n 1 nvidia-smi ps aux | grep -i ollama systemctl status ollama --no-pager python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py clear python src/run.py clear rm -f outputs/*.json python src/run.py clear rm -f outputs/*.json python src/run.py clear rm -f outputs/*.json rm -f outputs/*_RAW.txt python src/run.py ollama run llama3.3 nvidia-smi ollama info mkdir -p ~/dr-transcripts-deductive-V1 cd ~/dr-transcripts-deductive-V1 mkdir -p {data/{raw,processed},scripts,outputs/{codes,themes,visualizations},prompts,analysis,logs} source venv/bin/activate python3 -m venv .venv source venv/bin/activate python scripts/01_pdf_extraction.py source .venv/bin/activate python scripts/01_pdf_extraction.py pip install torch transformers langchain pandas numpy matplotlib seaborn pypdf2 pdfplumber textblob plotly kaleido openpyxl scikit-learn ollama langchain-community wordcloud python scripts/01_pdf_extraction.py python 
scripts/02_preprocessing.py python scripts/03_deductive_coding.py python scripts/04_theme_generation.py clear mkdir -p data/raw data/processed data/processed/chunked mkdir -p outputs/codes outputs/themes outputs/analysis outputs/visualizations mkdir -p prompts logs python scripts/00_test_llm.py python scripts/01_pdf_extraction.py python scripts/02_preprocessing.py python scripts/03_deductive_coding.py cd /home/sandhiya/AI-Tutor/AI-Tutor-V2 python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 pip install -r requirements.txt cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend pip install -r requirements.txt cd ~ mkdir paperbanana_workspace cd paperbanana_workspace git clone https://github.com/dwzhu-pku/PaperBanana.git cd PaperBanana ls python3 -m venv venv source venv/bin/activate ls cd ~/paperbanana_workspace exit cd ~/paperbanana_workspace rm -rf PaperBanana git clone https://github.com/llmsresearch/paperbanana.git cd paperbanana ls ~/paperbanana_workspace/paperbanana python3 -m venv venv source venv/bin/activate ls pip install --upgrade pip pip install -e . 
export OPENAI_API_KEY="[REDACTED-OPENAI-API-KEY]" cat README.md pip install -e ".[dev,openai]" cp .env.example .env nano .env export $(cat .env | xargs) echo $OPENAI_API_KEY paperbanana --help paperbanana generate --input examples/sample_inputs/transformer_method.txt --caption "Overview of our encoder-decoder architecture with sparse routing" --optimize --auto ping api.openai.com curl https://api.openai.com/v1 curl -v https://api.openai.com/v1 nano .env export $(cat .env | xargs) echo $OPENAI_API_KEY set -a source .env set +a echo $OPENAI_API_KEY | wc -c curl https://api.openai.com/v1/models -H "Authorization: Bearer $OPENAI_API_KEY" paperbanana generate --input examples/sample_inputs/transformer_method.txt --caption "Overview of our encoder-decoder architecture with sparse routing" --optimize --auto cd /home/sandhiya/paperbanana_workspace/paperbanana python3 -m venv venv source venv/bin/activate paperbanana generate --input examples/sample_inputs/my_new_architecture.txt --caption "Multi-Agent RAG Architecture" --optimize --auto set -a source .env set +a echo $OPENAI_API_KEY | wc -c curl https://api.openai.com/v1/models -H "Authorization: Bearer $OPENAI_API_KEY" paperbanana generate --input examples/sample_inputs/my_new_architecture.txt --caption "Multi-Agent RAG Architecture" --optimize --auto source .env echo $VLM_PROVIDER echo $OPENAI_API_KEY paperbanana generate --input examples/sample_inputs/my_new_architecture.txt --caption "Multi-Agent RAG Architecture" --optimize --auto export VLM_PROVIDER=openai export IMAGE_PROVIDER=openai export OPENAI_API_KEY=[REDACTED-OPENAI-API-KEY] paperbanana generate --input examples/sample_inputs/my_new_architecture.txt --caption "Multi-Agent RAG 
Architecture" --optimize --auto export IMAGE_PROVIDER=openai_imagen echo $IMAGE_PROVIDER paperbanana generate --input examples/sample_inputs/my_new_architecture.txt --caption "Multi-Agent RAG Architecture" --optimize --auto paperbanana generate --input examples/sample_inputs/cv_evolution.txt --caption "Evolution of Computer Vision Architectures from CNNs to Vision Transformers" --optimize --auto clear paperbanana generate --input examples/sample_inputs/diffusion_model_architecture.txt --caption "Denoising Diffusion Probabilistic Model (DDPM) Architecture" --optimize --max-iterations 4 export $(grep -v '^#' .env | xargs) clear paperbanana generate --input examples/sample_inputs/diffusion_model_architecture.txt --caption "Denoising Diffusion Probabilistic Model (DDPM) Architecture" --optimize --max-iterations 4 paperbanana generate --input examples/sample_inputs/gan_architecture.txt --caption "Generative Adversarial Network (GAN) Architecture" --optimize --max-iterations 3 paperbanana generate --input examples/sample_inputs/multimodal_architecture.txt --caption "Vision-Language Multimodal Transformer Architecture" --optimize --max-iterations 5 paperbanana generate --input examples/sample_inputs/cv_dataset_pipeline.txt --caption "Computer Vision Dataset Creation Pipeline Architecture" --optimize --max-iterations 4 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 --version source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 --version python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 cls clear uvicorn app.main:app --host 0.0.0.0 --port 9003 curl 
http://127.0.0.1:9003/health ``` If it returns `{"status":"ok"}` the backend is fine. Then open your browser and go to exactly: ``` http://127.0.0.1:5504/AI-Tutor/AI-Tutor-V2/frontend/index.html cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 clear uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate uvicorn app.main:app --host 0.0.0.0 --port 9003 cd /home/sandhiya/AI-Tutor/AI-Tutor-V2/backend python3 -m venv .venv source .venv/bin/activate cd /home/sandhiya/Transcripts-Automated/Tr-1 sudo systemctl start docker docker-compose up -d docker-compose ps curl http://localhost:8000/health curl -X POST http://localhost:8000/auth/register -H "Content-Type: application/json" -d '{"email":"test@iausa.com","password":"test123","full_name":"Test User","org_name":"IAUSA"}' curl http://localhost:9002/health curl -X POST http://localhost:9002/auth/register \ curl -X POST http://localhost:9002/auth/login \ psql "postgresql://postgres.jceyhqalyrcafdzmffqx:[REDACTED-DB-PASSWORD]@aws-0-ap-northeast-1.pooler.supabase.com:6543/postgres" sudo apt install 
postgresql-client -y psql "postgresql://postgres.jceyhqalyrcafdzmffqx:[REDACTED-DB-PASSWORD]@aws-0-ap-northeast-1.pooler.supabase.com:6543/postgres" clear cd /home/sandhiya/Transcripts-Automated/Tr-1 sudo systemctl start docker docker-compose up -d docker-compose ps curl http://localhost:8000/health curl http://localhost:9002/health curl -X POST http://localhost:9002/auth/register \ curl -X POST http://localhost:9002/auth/login \ clear sudo apt install docker-compose -y docker-compose --version sudo apt install docker-compose -y cp .env.example .env python3 -c "import secrets; print(secrets.token_hex(32))" sudo apt install docker-compose -y docker-compose up -d curl http://localhost:8001/health clear cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend ls docker-compose.yml nano .env docker-compose up -d docker-compose ps curl http://localhost:8001/health docker-compose logs app clear docker-compose down docker-compose up -d docker-compose ps curl http://localhost:8001/health docker-compose logs app clear docker-compose down docker-compose build --no-cache docker-compose up -d curl http://localhost:8001/health curl -X POST http://localhost:8001/auth/register -H "Content-Type: application/json" -d '{"email":"test@iausa.com","password":"test123","full_name":"Test User","org_name":"IAUSA"}' curl -X POST http://localhost:8001/auth/login -H "Content-Type: application/x-www-form-urlencoded" -d "username=test@iausa.com&password=test123" ``` ### Test 3 — Check Supabase Go to **Supabase Table Editor → users table** — you should see the test user row appear there. 
--- ## Sprint 1 Final Checklist ``` ✅ Supabase project created ✅ All 5 tables created ✅ Docker running (Redis + App) ✅ Health endpoint returns ok ✅ API accessible on port 8001 ☐ Register endpoint works ☐ Login returns JWT token ☐ User appears in Supabase Table Editor docker-compose logs app --tail=30 docker-compose logs app --tail=50 docker-compose logs app docker-compose exec app python3 -c " from app.database import engine try: with engine.connect() as conn: print('DB CONNECTION OK') except Exception as e: print(f'DB ERROR: {e}') " claer clear docker-compose exec app env | grep DATABASE_URL docker-compose down docker-compose up -d --force-recreate docker-compose exec app env | grep DATABASE_URL docker-compose exec app python3 -c " from app.database import engine try: with engine.connect() as conn: print('DB CONNECTION OK') except Exception as e: print(f'DB ERROR: {e}') " clear docker-compose down docker-compose up -d --force-recreate docker-compose exec app python3 -c " from app.database import engine try: with engine.connect() as conn: print('DB CONNECTION OK') except Exception as e: print(f'DB ERROR: {e}') " clear docker-compose down docker-compose up -d --force-recreate docker-compose exec app python3 -c " from app.database import engine try: with engine.connect() as conn: print('DB CONNECTION OK') except Exception as e: print(f'DB ERROR: {e}') " docker-compose exec app env | grep DATABASE_URL docker-compose exec app python3 -c " import os url = os.environ.get('DATABASE_URL', 'NOT SET') print('Full URL:', url) # Parse the username from urllib.parse import urlparse parsed = urlparse(url) print('Username:', parsed.username) print('Password:', parsed.password) print('Host:', parsed.hostname) print('Port:', parsed.port) " ``` Paste the output — this will tell us exactly what username and password Supabase is receiving. The `Tenant or user not found` error specifically means the **username format is wrong** for the pooler. 
The correct username for the Supabase transaction pooler must be exactly: ``` postgres.jceyhqalyrcafdzmffqx clear docker-compose down docker-compose up -d --force-recreate clear docker-compose down docker-compose up -d --force-recreate docker-compose exec app python3 -c " from app.database import engine try: with engine.connect() as conn: print('DB CONNECTION OK') except Exception as e: print(f'DB ERROR: {e}') " clear docker-compose down docker-compose up -d --force-recreate docker-compose exec app python3 -c " from app.database import engine try: with engine.connect() as conn: print('DB CONNECTION OK') except Exception as e: print(f'DB ERROR: {e}') " curl -X POST http://localhost:8001/auth/register -H "Content-Type: application/json" -d '{"email":"test@iausa.com","password":"test123","full_name":"Test User","org_name":"IAUSA"}' docker-compose logs app | tail -50 docker-compose exec app python3 -c " from app.config import settings print('CORS:', settings.CORS_ORIGINS) print('DB:', settings.DATABASE_URL[:50]) " clear docker-compose exec app pip install bcrypt==4.0.1 docker-compose restart app curl -X POST http://localhost:8001/auth/register -H "Content-Type: application/json" -d '{"email":"test@iausa.com","password":"test123","full_name":"Test User","org_name":"IAUSA"}' curl -X POST http://localhost:8001/auth/login -H "Content-Type: application/x-www-form-urlencoded" -d "username=test@iausa.com&password=test123" docker-compose down docker-compose build --no-cache docker-compose up -d clear docker-compose down docker-compose build --no-cache docker-compose up -d curl http://localhost:8001/health clear docker-compose down docker-compose up -d docker-compose ps curl http://localhost:8001/health curl http://localhost:8001/ curl -X POST http://localhost:8001/api/upload -H "Authorization: Bearer invalid" # Login first to get token TOKEN=$(curl -s -X POST http://localhost:8001/auth/login \ -H "Content-Type: application/x-www-form-urlencoded" \ -d 
"username=test@iausa.com&password=test123" | python3 -c "import sys,json; print(json.load(sys.stdin)['access_token'])") echo "Token: $TOKEN" cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose logs -f worker cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose down docker-compose up -d docker-compose ps curl http://localhost:8001/health curl http://localhost:8001/ curl -X POST http://localhost:8001/api/upload -H "Authorization: Bearer invalid" # Login first to get token TOKEN=$(curl -s -X POST http://localhost:8001/auth/login \ -H "Content-Type: application/x-www-form-urlencoded" \ -d "username=test@iausa.com&password=test123" | python3 -c "import sys,json; print(json.load(sys.stdin)['access_token'])") echo "Token: $TOKEN" clear docker-compose logs app | tail -20 curl -v -X POST http://localhost:8001/api/upload -H "Authorization: Bearer invalid" 2>&1 | grep "< HTTP" curl -v -X POST http://localhost:8001/auth/login -H "Content-Type: application/x-www-form-urlencoded" -d "username=test@iausa.com&password=test123" clear docker-compose down docker-compose up -d curl -X POST http://localhost:8001/auth/login -H "Content-Type: application/x-www-form-urlencoded" -d "username=test@iausa.com&password=test123" docker-compose exec app env | grep DATABASE_URL docker-compose logs app | tail -10 clear docker-compose down docker-compose build --no-cache docker-compose up -d docker-compose exec app cat app/database.py | grep replace curl -X POST http://localhost:8001/auth/login -H "Content-Type: application/x-www-form-urlencoded" -d "username=test@iausa.com&password=test123" docker-compose logs app | tail -10 clear docker-compose exec app cat app/database.py | grep replace docker-compose exec app python3 -c " import os url = os.environ.get('DATABASE_URL', '') fixed = url.replace('%40', '@') print('Original:', url[:80]) print('Fixed:', fixed[:80]) " cat Dockerfile docker-compose exec app python3 -c "import app.database; import 
inspect; print(inspect.getsource(app.database))" clear docker-compose down docker-compose build --no-cache docker-compose up -d curl -X POST http://localhost:8001/auth/login -H "Content-Type: application/x-www-form-urlencoded" -d "username=test@iausa.com&password=test123" docker-compose logs app | tail -20 clear docker-compose exec app python3 -c "import app.database; import inspect; print(inspect.getsource(app.database))" docker-compose exec app python3 -c " from urllib.parse import urlparse, urlunparse, unquote import os url = os.environ.get('DATABASE_URL', '') parsed = urlparse(url) print('username:', parsed.username) print('password raw:', parsed.password) print('hostname:', parsed.hostname) print('port:', parsed.port) decoded_password = unquote(parsed.password or '') print('password decoded:', decoded_password) decoded_netloc = f'{parsed.username}:{decoded_password}@{parsed.hostname}:{parsed.port}' final = urlunparse((parsed.scheme, decoded_netloc, parsed.path, parsed.params, parsed.query, parsed.fragment)) print('final URL:', final[:100]) " clear nano app/database.py docker-compose down docker-compose build --no-cache docker-compose up -d curl -X POST http://localhost:8001/auth/login -H "Content-Type: application/x-www-form-urlencoded" -d "username=test@iausa.com&password=test123" clear TOKEN=$(curl -s -X POST http://localhost:8001/auth/login \ -H "Content-Type: application/x-www-form-urlencoded" \ -d "username=test@iausa.com&password=test123" | python3 -c "import sys,json; print(json.load(sys.stdin)['access_token'])") echo "Token: $TOKEN" curl -s http://localhost:8001/ | python3 -m json.tool curl -s -o /dev/null -w "%{http_code}" -X POST http://localhost:8001/api/upload -H "Authorization: Bearer $TOKEN" docker-compose logs worker | tail -5 clear grep -A 10 "worker:" docker-compose.yml | grep REDIS clear docker-compose down docker-compose up -d docker-compose logs worker | tail -5 clear docker-compose down docker-compose up -d docker-compose logs worker | 
tail -5 clear TOKEN=$(curl -s -X POST http://localhost:8001/auth/login \ -H "Content-Type: application/x-www-form-urlencoded" \ -d "username=test@iausa.com&password=test123" | python3 -c "import sys,json; print(json.load(sys.stdin)['access_token'])") echo "Token: $TOKEN" curl -s http://localhost:8001/auth/me -H "Authorization: Bearer $TOKEN" | python3 -m json.tool curl -X POST http://localhost:8001/api/upload \ clear TOKEN=$(curl -s -X POST http://localhost:8001/auth/login \ -H "Content-Type: application/x-www-form-urlencoded" \ -d "username=test@iausa.com&password=test123" | python3 -c "import sys,json; print(json.load(sys.stdin)['access_token'])") && echo "Token OK" find /home/sandhiya -name "*.pdf" 2>/dev/null | head -5 ls /home/sandhiya/Transcripts-Automated/ ls clear cat > /tmp/test_transcript.txt << 'EOF' Interviewer: Can you tell me about your experience with your eye care appointments? Patient: I haven't been going as regularly as I should. The clinic is really far from my house and I don't have a car. My son sometimes drives me but he works during the week so it's hard to schedule. Interviewer: What about the cost of the visits? Patient: The copays are difficult. I'm on a fixed income and sometimes I have to choose between my medications and food. I skipped my last two appointments because I couldn't afford the copay. Interviewer: Do you understand why the eye appointments are important? Patient: Honestly I didn't really understand at first. I thought my eyes were fine because I could still see okay. But my doctor explained that the damage happens before you notice it. That really scared me and now I take it more seriously. Interviewer: What helps you remember to take care of your diabetes? Patient: My daughter calls me every morning to remind me to take my insulin. And my doctor is really kind, she always explains everything clearly and never makes me feel stupid for asking questions. That makes a big difference. 
Interviewer: Is there anything the healthcare system could do better? Patient: I wish there was a van or some kind of transport service. Also the wait times are too long. I waited three hours last time and I had to miss work for that. EOF echo "Test transcript created" curl -X POST http://localhost:8001/api/upload -H "Authorization: Bearer $TOKEN" -F "project_id=03d9766b-7de4-429f-8d85-8c680ab0b42d" -F "file=@/tmp/test_transcript.txt" clear curl -X POST http://localhost:8001/api/jobs/run -H "Authorization: Bearer $TOKEN" -H "Content-Type: application/json" -d '{"transcript_id":"9ba790d7-cc4b-4ce4-884a-3a0dd22a7173","project_id":"03d9766b-7de4-429f-8d85-8c680ab0b42d"}' JOB_ID=$(curl -s -X POST http://localhost:8001/api/jobs/run \ -H "Authorization: Bearer $TOKEN" \ -H "Content-Type: application/json" \ -d '{"transcript_id":"9ba790d7-cc4b-4ce4-884a-3a0dd22a7173","project_id":"03d9766b-7de4-429f-8d85-8c680ab0b42d"}' | python3 -c "import sys,json; print(json.load(sys.stdin)['job_id'])") && echo "Job ID: $JOB_ID" docker-compose logs app | tail -20 clear docker-compose exec app python3 -c "from app.config import settings; print('REDIS:', settings.REDIS_URL)" docker-compose exec app env | grep REDIS clear docker-compose exec app python3 -c " import redis r = redis.from_url('redis://redis:6379/0') print(r.ping()) " docker-compose exec app python3 -c " from app.workers.celery_app import celery_app print('Broker:', celery_app.conf.broker_url) i = celery_app.control.inspect(timeout=2) print('Workers:', i.ping()) " docker-compose exec app python3 -c " from app.workers.pipeline_tasks import run_full_pipeline print('Import OK') " clear docker-compose exec app python3 -c " from app.workers.pipeline_tasks import run_full_pipeline task = run_full_pipeline.delay( transcript_id='9ba790d7-cc4b-4ce4-884a-3a0dd22a7173', project_id='03d9766b-7de4-429f-8d85-8c680ab0b42d' ) print('Task ID:', task.id) print('Status:', task.status) " docker-compose logs app 2>&1 | grep -A 5 
"run_job\|jobs/run\|500" clear docker-compose down docker-compose build --no-cache docker-compose up -d docker-compose exec app python3 -c " from app.workers.pipeline_tasks import run_full_pipeline task = run_full_pipeline.delay( transcript_id='9ba790d7-cc4b-4ce4-884a-3a0dd22a7173', project_id='03d9766b-7de4-429f-8d85-8c680ab0b42d' ) print('Task ID:', task.id) " clear docker-compose exec app python3 -c " import os print('REDIS_URL env:', os.environ.get('REDIS_URL')) from app.workers.celery_app import celery_app print('Celery broker:', celery_app.conf.broker_url) print('Celery backend:', celery_app.conf.result_backend) " clear docker-compose down docker-compose build --no-cache docker-compose up -d docker-compose exec app python3 -c " from app.workers.pipeline_tasks import run_full_pipeline task = run_full_pipeline.delay( transcript_id='9ba790d7-cc4b-4ce4-884a-3a0dd22a7173', project_id='03d9766b-7de4-429f-8d85-8c680ab0b42d' ) print('Task ID:', task.id) " clear docker-compose logs worker | tail -20 docker-compose exec app python3 -c " from app.workers.celery_app import celery_app from celery.result import AsyncResult result = AsyncResult('c6b90a58-ee90-4d0d-aa0f-ab985f05f15e', app=celery_app) print('State:', result.state) print('Info:', result.info) " clear curl http://localhost:11434/api/tags ollama serve & ollama list docker-compose exec worker curl http://host.docker.internal:11434/api/tags TOKEN=$(curl -s -X POST http://localhost:8001/auth/login \ -H "Content-Type: application/x-www-form-urlencoded" \ -d "username=test@iausa.com&password=test123" | python3 -c "import sys,json; print(json.load(sys.stdin)['access_token'])") && echo "Token OK" curl -X POST http://localhost:8001/api/jobs/run -H "Authorization: Bearer $TOKEN" -H "Content-Type: application/json" -d '{"transcript_id":"9ba790d7-cc4b-4ce4-884a-3a0dd22a7173","project_id":"03d9766b-7de4-429f-8d85-8c680ab0b42d"}' clear tailscale ip -4 docker-compose down docker-compose up -d docker-compose exec worker 
python3 -c " import urllib.request r = urllib.request.urlopen('http://100.x.x.x:11434/api/tags') print('Status:', r.status) " clear docker-compose down docker-compose up -d docker-compose exec worker python3 -c " import urllib.request r = urllib.request.urlopen('http://100.91.168.119:11434/api/tags') print('Ollama reachable! Status:', r.status) " TOKEN=$(curl -s -X POST http://localhost:8001/auth/login \ -H "Content-Type: application/x-www-form-urlencoded" \ -d "username=test@iausa.com&password=test123" | python3 -c "import sys,json; print(json.load(sys.stdin)['access_token'])") && echo "Token OK" curl -X POST http://localhost:8001/api/jobs/run -H "Authorization: Bearer $TOKEN" -H "Content-Type: application/json" -d '{"transcript_id":"9ba790d7-cc4b-4ce4-884a-3a0dd22a7173","project_id":"03d9766b-7de4-429f-8d85-8c680ab0b42d"}' docker-compose logs -f worker docker-compose down clear docker-compose build --no-cache docker-compose up -d docker-compose logs -f worker cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend TOKEN=$(curl -s -X POST http://localhost:8001/auth/login \ -H "Content-Type: application/x-www-form-urlencoded" \ -d "username=test@iausa.com&password=test123" | python3 -c "import sys,json; print(json.load(sys.stdin)['access_token'])") && echo "Token OK" curl -X POST http://localhost:8001/api/jobs/run -H "Authorization: Bearer $TOKEN" -H "Content-Type: application/json" -d '{"transcript_id":"9ba790d7-cc4b-4ce4-884a-3a0dd22a7173","project_id":"03d9766b-7de4-429f-8d85-8c680ab0b42d"}' clear curl -s http://localhost:8001/api/jobs/9ef92a34-5767-4fb2-9106-cbbc5bc0954c -H "Authorization: Bearer $TOKEN" | python3 -m json.tool cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose ps curl http://localhost:8001/health docker-compose down docker-compose build --no-cache docker-compose up -d curl http://localhost:8001/health cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose ps curl 
http://localhost:8001/health docker-compose logs -f worker cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose ps curl http://localhost:8001/health docker-compose logs -f worker clear docker-compose down docker-compose build --no-cache docker-compose up -d curl http://localhost:8001/health clear docker-compose down docker-compose build --no-cache docker-compose up -d cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose ps curl http://localhost:8001/health clear docker-compose exec app python3 -c " from app.database import SessionLocal from app.models.code_result import CodeResult db = SessionLocal() r = db.query(CodeResult).first() print(r.llm_justification[:500] if r.llm_justification else 'No justification') db.close() " docker-compose exec app python3 -c " from app.database import SessionLocal from app.models.code_result import CodeResult db = SessionLocal() r = db.query(CodeResult).first() print(r.llm_justification if r.llm_justification else 'No justification') db.close() " clear docker-compose down docker-compose build --no-cache docker-compose up -d docker-compose ps curl http://localhost:8001/health cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose ps curl http://localhost:8001/health cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose exec app python3 -c " from app.routes.analytics import extract_confidence print('New analytics loaded OK') " cleadr clear TOKEN=$(curl -s -X POST http://localhost:8001/auth/login \ -H "Content-Type: application/x-www-form-urlencoded" \ -d "username=test@iausa.com&password=test123" | python3 -c "import sys,json; print(json.load(sys.stdin)['access_token'])") curl -s -H "Authorization: Bearer $TOKEN" http://localhost:8001/api/analytics/03d9766b-7de4-429f-8d85-8c680ab0b42d/confusion | python3 -m json.tool docker-compose down docker-compose build --no-cache docker-compose up -d docker-compose logs app | tail -5 
docker-compose logs app | tail -10 docker-compose logs app | tail -30 clear docker-compose restart app curl -s -H "Authorization: Bearer $TOKEN" http://localhost:8001/api/analytics/03d9766b-7de4-429f-8d85-8c680ab0b42d/confusion | python3 -m json.tool clear curl -s -H "Authorization: Bearer $TOKEN" http://localhost:8001/api/analytics/03d9766b-7de4-429f-8d85-8c680ab0b42d/confusion docker-compose logs app | tail -15 docker-compose logs app | tail -30 clear sed -n '125,215p' app/routes/analytics.py clear docker-compose restart app curl -s -H "Authorization: Bearer $TOKEN" http://localhost:8001/api/analytics/03d9766b-7de4-429f-8d85-8c680ab0b42d/confusion | python3 -m json.tool docker-compose logs app | tail -30 clear sed -n '200,215p' app/routes/analytics.py clear grep -n "from collections" app/routes/analytics.py sed -n '155,200p' app/routes/analytics.py clear docker-compose down docker-compose build --no-cache docker-compose up -d curl -s -H "Authorization: Bearer $TOKEN" http://localhost:8001/api/analytics/03d9766b-7de4-429f-8d85-8c680ab0b42d/confusion | python3 -m json.tool cd /home/sandhiya/Transcripts-Automated/Tr-1 cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose ps docker-compose logs -f worker clear docker-compose logs -f worker cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose ps docker-compose logs -f worker clear docker-compose down docker-compose build --no-cache docker-compose up -d docker-compose logs app | tail -30 clear docker-compose restart app docker-compose down docker-compose build --no-cache docker-compose up -d cd /home/sandhiya/IOA-NewSite/backend cd ~/IOA-NewSite/frontend ls npm install npm install --legacy-peer-deps clear nano .env npm start clear npm start clear npm start clear npm start clear npm start cat src/index.js grep -r "stripe" src/ --include="*.js" --include="*.jsx" -l clear cat src/App.js cat src/contexts/AuthContext.js clear echo $REACT_APP_BACKEND_URL curl 
http://localhost:8002/api/auth/me clear echo $REACT_APP_BACKEND_URL curl http://localhost:8002/api/auth/me cd ~/IOA-NewSite/frontend npm run build npx serve -s build -l 3001 cat ~/IOA-NewSite/frontend/build/index.html ls ~/IOA-NewSite/frontend/build/static/js/ clear sed -i 's|||g' ~/IOA-NewSite/frontend/public/index.html npm run build && npx serve -s build -l 3001 clear grep -o "REACT_APP_BACKEND_URL\|localhost:8002\|localhost:3000" ~/IOA-NewSite/frontend/build/static/js/main.463c975b.js clear npm start cd /home/sandhiya/IOA-NewSite/backend sudo systemctl status mongod hostname -I cd /home/sandhiya/IOA-NewSite npm install -g pm2 clear sudo tailscale funnel --bg --https=443 --set-path /ioa-backend http://127.0.0.1:8002 cd ~/IOA-NewSite/frontend npm run build clear cd ~/IOA-NewSite/frontend npx serve -s build -l 3001 clear cd ~/IOA-NewSite/frontend npx serve -s build -l 3001 clear npm run build npx serve -s build -l 3001 clear npm start 2>&1 | head -100 grep -r "REACT_APP_BACKEND_URL\|localhost:8002\|localhost:3000" ~/IOA-NewSite/frontend/src/ --include="*.js" --include="*.jsx" | head -5 cat ~/IOA-NewSite/frontend/.env grep -o "localhost:8002" ~/IOA-NewSite/frontend/src/contexts/AuthContext.js http://localhost:8002/api/auth/me clear cat ~/IOA-NewSite/frontend/.env sudo ufw allow 8002 sudo ufw allow 4000 curl http://192.168.1.157:8002/api/auth/me clear sudo ufw allow 8002 sudo ufw status sudo tailscale funnel --bg --https=443 --set-path /ioa-frontend http://127.0.0.1:4000 cat > ~/IOA-NewSite/frontend/.env << 'EOF' REACT_APP_BACKEND_URL=https://spark-b1b8.tail894d7c.ts.net/ioa-backend REACT_APP_STRIPE_PUBLISHABLE_KEY=pk_live_51TLjXi7Cb7ioE7wR7UExO32MbCFIBYw6D97NmKy52PKGANA6SMCE4QyBCrTDM9mguloaj4q1MENbV8vvspmru1oy00alWWhdSf EOF pwd ls mkdir IOA-NewSite cd IOA-NewSite git clone https://github.com/CRMP-25/IOA-NewSite.git . 
ls cd backend cat requirements.txt clear python3 --version mongod --version curl -fsSL https://www.mongodb.org/static/pgp/server-7.0.asc | sudo gpg -o /usr/share/keyrings/mongodb-server-7.0.gpg --dearmor mongod --version clear curl -fsSL https://www.mongodb.org/static/pgp/server-7.0.asc | sudo gpg -o /usr/share/keyrings/mongodb-server-7.0.gpg --dearmor mongod --version echo "deb [ arch=amd64,arm64 signed-by=/usr/share/keyrings/mongodb-server-7.0.gpg ] https://repo.mongodb.org/apt/ubuntu jammy/mongodb-org/7.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-7.0.list sudo apt-get update && sudo apt-get install -y mongodb-org clear sudo systemctl start mongod sudo systemctl enable mongod sudo systemctl status mongod nano .env clear cd ~/IOA-NewSite/backend pip install -r requirements.txt --break-system-packages grep -r "emergentintegrations" ~/IOA-NewSite/backend/ sed -i '/emergentintegrations/d' requirements.txt clear pip install -r requirements.txt --break-system-packages clear python3 server.py clear uvicorn server:app --host 0.0.0.0 --port 8000 --reload python3 -m uvicorn server:app --host 0.0.0.0 --port 8000 --reload python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear sed -i 's|Path("/app/memory").mkdir(exist_ok=True)|Path("/home/sandhiya/IOA-NewSite/memory").mkdir(exist_ok=True)|g' server.py sed -i 's|"/app/memory/test_credentials.md"|"/home/sandhiya/IOA-NewSite/memory/test_credentials.md"|g' server.py sed -i 's|Path("/app/memory").mkdir(exist_ok=True)|Path("/home/sandhiya/IOA-NewSite/memory").mkdir(exist_ok=True)|g' server.py sed -i 's|"/app/memory/test_credentials.md"|"/home/sandhiya/IOA-NewSite/memory/test_credentials.md"|g' server.py python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear grep -n "emergentintegrations" ~/IOA-NewSite/backend/server.py sed -n '755,800p' ~/IOA-NewSite/backend/server.py sed -n '670,720p' ~/IOA-NewSite/backend/server.py clear sed -n '955,1010p' ~/IOA-NewSite/backend/server.py clear 
echo "OPENAI_API_KEY=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" >> ~/IOA-NewSite/backend/.env sed -i 's/from courses_data import COURSES/from courses_data import COURSES\nfrom openai import AsyncOpenAI/' ~/IOA-NewSite/backend/server.py sed -i 's/ADZUNA_APP_KEY = os.environ.get/OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")\nADZUNA_APP_KEY = os.environ.get/' ~/IOA-NewSite/backend/server.py cat > /tmp/fix_chat.py << 'EOF' import re with open('/home/sandhiya/IOA-NewSite/backend/server.py', 'r') as f: content = f.read() # Fix 1: main chat endpoint old1 = ''' from emergentintegrations.llm.chat import LlmChat, UserMessage session_id = input_data.session_id or str(uuid.uuid4()) history = await db.chat_messages.find({"session_id": session_id}, {"_id": 0}).sort("timestamp", 1).to_list(20) courses_summary = "\\n".join([f"Course {c['course_number']}: {c['title']} - {c['description']} (${c['price']}, {c['hours']}hrs)" for c in COURSES]) system_msg = f"""You are an AI academic advisor for the Institute of Analytics USA (TM) - PG Diploma in AI, ML & Data Science Engineering program. You help prospective students understand the program, answer questions about courses, pricing, career outcomes, and enrollment. PROGRAM DETAILS: - 6 comprehensive courses, each $495 (total $2,970 for full diploma) - Each course: 20 hours of lectures + practicals - Cost per lecture hour: ~$24.50 - Expected starting salary for graduates: $80,000+ - ROI: Program pays for itself within the first month of employment COURSES: {courses_summary} PRICING STRATEGY: - Individual course: $495 - Full program bundle: $2,970 (save by enrolling in all 6) - Payment plans available for flexibility - The investment of $2,970 leads to $80k+ annual salary - exceptional ROI Be enthusiastic, knowledgeable, and helpful. Encourage enrollment while being honest and informative. 
Keep responses concise (2-4 paragraphs max).""" chat = LlmChat( api_key=EMERGENT_LLM_KEY, session_id=session_id, system_message=system_msg ).with_model("openai", "gpt-5.2") user_msg = UserMessage(text=input_data.message) response_text = await chat.send_message(user_msg)''' new1 = ''' session_id = input_data.session_id or str(uuid.uuid4()) history = await db.chat_messages.find({"session_id": session_id}, {"_id": 0}).sort("timestamp", 1).to_list(20) courses_summary = "\\n".join([f"Course {c['course_number']}: {c['title']} - {c['description']} (${c['price']}, {c['hours']}hrs)" for c in COURSES]) system_msg = f"""You are an AI academic advisor for the Institute of Analytics USA (TM) - PG Diploma in AI, ML & Data Science Engineering program. You help prospective students understand the program, answer questions about courses, pricing, career outcomes, and enrollment. PROGRAM DETAILS: - 6 comprehensive courses, each $495 (total $2,970 for full diploma) - Each course: 20 hours of lectures + practicals - Cost per lecture hour: ~$24.50 - Expected starting salary for graduates: $80,000+ - ROI: Program pays for itself within the first month of employment COURSES: {courses_summary} PRICING STRATEGY: - Individual course: $495 - Full program bundle: $2,970 (save by enrolling in all 6) - Payment plans available for flexibility - The investment of $2,970 leads to $80k+ annual salary - exceptional ROI Be enthusiastic, knowledgeable, and helpful. Encourage enrollment while being honest and informative. 
Keep responses concise (2-4 paragraphs max).""" openai_client = AsyncOpenAI(api_key=OPENAI_API_KEY) messages = [{"role": "system", "content": system_msg}] for h in history: messages.append({"role": h["role"], "content": h["content"]}) messages.append({"role": "user", "content": input_data.message}) completion = await openai_client.chat.completions.create( model="gpt-4o-mini", messages=messages, max_tokens=500 ) response_text = completion.choices[0].message.content''' content = content.replace(old1, new1) with open('/home/sandhiya/IOA-NewSite/backend/server.py', 'w') as f: f.write(content) print("Fix 1 done") EOF python3 /tmp/fix_chat.py clear cat > /tmp/fix_chat2.py << 'EOF' with open('/home/sandhiya/IOA-NewSite/backend/server.py', 'r') as f: content = f.read() # Fix 2: job generation function old2 = ''' from emergentintegrations.llm.chat import LlmChat, UserMessage chat = LlmChat( api_key=EMERGENT_LLM_KEY, session_id=f"job-gen-{uuid.uuid4()}", system_message="""You are a job market research assistant specializing in AI, Machine Learning, and Data Science positions. Generate realistic, current job listings based on what's available in the market. Return ONLY a valid JSON array of job objects. Each object must have these fields: - title (string) - company (string - use real company names known for AI/ML hiring) - location (string) - salary_range (string, e.g. "$80,000 - $120,000") - job_type (string: "full-time", "contract", "remote") - description (string, 2-3 sentences) - requirements (array of strings, 3-5 items) - match_score (number 70-98, representing how well this matches AI/ML/DS graduates) - posted_days_ago (number 1-30) Do NOT include any text before or after the JSON array. Just the raw JSON.""" ).with_model("openai", "gpt-5.2") prompt = f"Generate {input_data.count} current job listings for: {input_data.query} in {input_data.location}. Focus on entry-level to mid-level positions suitable for graduates of an AI/ML/Data Science diploma program." 
user_msg = UserMessage(text=prompt) response_text = await chat.send_message(user_msg)''' new2 = ''' openai_client = AsyncOpenAI(api_key=OPENAI_API_KEY) prompt = f"Generate {input_data.count} current job listings for: {input_data.query} in {input_data.location}. Focus on entry-level to mid-level positions suitable for graduates of an AI/ML/Data Science diploma program." completion = await openai_client.chat.completions.create( model="gpt-4o-mini", messages=[ {"role": "system", "content": """You are a job market research assistant specializing in AI, Machine Learning, and Data Science positions. Generate realistic, current job listings based on what\'s available in the market. Return ONLY a valid JSON array of job objects. Each object must have these fields: title, company, location, salary_range, job_type, description, requirements, match_score, posted_days_ago. Do NOT include any text before or after the JSON array. Just the raw JSON."""}, {"role": "user", "content": prompt} ], max_tokens=1000 ) response_text = completion.choices[0].message.content''' content = content.replace(old2, new2) # Fix 3: recommendation letter function old3 = ''' from emergentintegrations.llm.chat import LlmChat, UserMessage courses_list = ", ".join(completed_courses) if completed_courses else "Currently enrolled in the PG Diploma program" skills_list = ", ".join(list(set(all_skills))) if all_skills else "AI, ML, Data Science fundamentals" profile = candidate.get("profile", {}) bio = profile.get("bio", "") if profile else "" chat = LlmChat( api_key=EMERGENT_LLM_KEY, session_id=f"letter-{uuid.uuid4()}", system_message="""You are a professional academic administrator writing formal recommendation letters on behalf of the Institute of Analytics USA (TM). Write compelling, professional recommendation letters that highlight the candidate\'s training, skills, and suitability for the target position. 
The letter should be on formal letterhead style, addressed to the hiring organization, and signed by the Program Director of Institute of Analytics USA (TM). Be specific about the candidate\'s technical competencies based on their completed coursework. Keep it to one page (about 350-450 words). Format the letter properly with date, recipient info, salutation, body paragraphs, and signature block.""" ).with_model("openai", "gpt-5.2") prompt = f"""Generate a professional recommendation letter for the following candidate: CANDIDATE: {candidate.get(\'name\', \'Candidate\')} EMAIL: {candidate.get(\'email\', \'\')} BIO: {bio} COMPLETED COURSES: {courses_list} TECHNICAL SKILLS ACQUIRED: {skills_list} TARGET POSITION: {job.get(\'title\', \'Open Position\')} TARGET COMPANY: {input_data.employer_company or job.get(\'company\', \'Hiring Organization\')} JOB DESCRIPTION: {job.get(\'description\', \'\')} JOB REQUIREMENTS: {\', \'.join(job.get(\'requirements\', []))} ADDITIONAL NOTES FROM PROGRAM DIRECTOR: {input_data.additional_notes or \'None\'} Write the letter as if from the Program Director of Institute of Analytics USA (TM).""" response_text = await chat.send_message(UserMessage(text=prompt))''' new3 = ''' courses_list = ", ".join(completed_courses) if completed_courses else "Currently enrolled in the PG Diploma program" skills_list = ", ".join(list(set(all_skills))) if all_skills else "AI, ML, Data Science fundamentals" profile = candidate.get("profile", {}) bio = profile.get("bio", "") if profile else "" openai_client = AsyncOpenAI(api_key=OPENAI_API_KEY) prompt = f"""Generate a professional recommendation letter for the following candidate: CANDIDATE: {candidate.get("name", "Candidate")} EMAIL: {candidate.get("email", "")} BIO: {bio} COMPLETED COURSES: {courses_list} TECHNICAL SKILLS ACQUIRED: {skills_list} TARGET POSITION: {job.get("title", "Open Position")} TARGET COMPANY: {input_data.employer_company or job.get("company", "Hiring Organization")} JOB DESCRIPTION: 
{job.get("description", "")} JOB REQUIREMENTS: {", ".join(job.get("requirements", []))} ADDITIONAL NOTES FROM PROGRAM DIRECTOR: {input_data.additional_notes or "None"} Write the letter as if from the Program Director of Institute of Analytics USA (TM).""" completion = await openai_client.chat.completions.create( model="gpt-4o-mini", messages=[ {"role": "system", "content": "You are a professional academic administrator writing formal recommendation letters on behalf of the Institute of Analytics USA (TM). Write compelling, professional recommendation letters. Keep it to one page (about 350-450 words). Format the letter properly with date, recipient info, salutation, body paragraphs, and signature block."}, {"role": "user", "content": prompt} ], max_tokens=800 ) response_text = completion.choices[0].message.content''' content = content.replace(old3, new3) with open('/home/sandhiya/IOA-NewSite/backend/server.py', 'w') as f: f.write(content) print("Fix 2 and 3 done") EOF python3 /tmp/fix_chat2.py clear grep -n "emergentintegrations" ~/IOA-NewSite/backend/server.py clear cat > /tmp/fix_remaining.py << 'EOF' with open('/home/sandhiya/IOA-NewSite/backend/server.py', 'r') as f: lines = f.readlines() for i, line in enumerate(lines): if 'from emergentintegrations.payments.stripe' in line: lines[i] = ' # Stripe temporarily disabled\n' if 'from emergentintegrations.llm.chat import LlmChat, UserMessage' in line: lines[i] = ' # Using OpenAI directly\n' with open('/home/sandhiya/IOA-NewSite/backend/server.py', 'w') as f: f.writelines(lines) print("All emergentintegrations references removed") EOF python3 /tmp/fix_remaining.py grep -n "emergentintegrations" ~/IOA-NewSite/backend/server.py clear python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear python3 -c "import stripe as stripe_lib; print('stripe ok')" python3 -c 
"import server" 2>&1 clear python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear cat > ~/IOA-NewSite/backend/.env << 'EOF' MONGO_URL=mongodb://localhost:27017 DB_NAME=ioa_database JWT_SECRET=ioa-usa-jwt-secret-2026-spark-dgx-server ADZUNA_APP_ID=3e523679 ADZUNA_APP_KEY=33ae443992c536e101321ad79e3136c1 ADMIN_EMAIL=admin@americaninstituteofanalytics.com ADMIN_PASSWORD=admin123 CORS_ORIGINS=* OPENAI_API_KEY=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 STRIPE_SECRET_KEY=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 FRONTEND_URL=http://192.168.1.157:4000 EOF python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear cat > ~/IOA-NewSite/backend/.env << 'EOF' MONGO_URL=mongodb://localhost:27017 DB_NAME=ioa_database JWT_SECRET=ioa-usa-jwt-secret-2026-spark-dgx-server ADZUNA_APP_ID=3e523679 ADZUNA_APP_KEY=33ae443992c536e101321ad79e3136c1 ADMIN_EMAIL=admin@americaninstituteofanalytics.com ADMIN_PASSWORD=admin123 CORS_ORIGINS=https://spark-b1b8.tail894d7c.ts.net,http://localhost:4000,http://localhost:3000 OPENAI_API_KEY=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 STRIPE_SECRET_KEY=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 FRONTEND_URL=https://spark-b1b8.tail894d7c.ts.net/ioa-frontend EOF clear python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear sed -i 
's/response.set_cookie(key="access_token", value=access_token, httponly=True, secure=False, samesite="lax"/response.set_cookie(key="access_token", value=access_token, httponly=True, secure=False, samesite="none"/g' ~/IOA-NewSite/backend/server.py sed -i 's/response.set_cookie(key="refresh_token", value=refresh_token, httponly=True, secure=False, samesite="lax"/response.set_cookie(key="refresh_token", value=refresh_token, httponly=True, secure=False, samesite="none"/g' ~/IOA-NewSite/backend/server.py python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload clear cat > /tmp/fix_cookies.py << 'EOF' with open('/home/sandhiya/IOA-NewSite/backend/server.py', 'r') as f: content = f.read() # Fix cookies to be secure=True for Tailscale HTTPS content = content.replace( 'response.set_cookie(key="access_token", value=access_token, httponly=True, secure=False, samesite="none"', 'response.set_cookie(key="access_token", value=access_token, httponly=True, secure=True, samesite="none"' ) content = content.replace( 'response.set_cookie(key="refresh_token", value=refresh_token, httponly=True, secure=False, samesite="none"', 'response.set_cookie(key="refresh_token", value=refresh_token, httponly=True, secure=True, samesite="none"' ) with open('/home/sandhiya/IOA-NewSite/backend/server.py', 'w') as f: f.write(content) print("Cookie fix done!") EOF python3 /tmp/fix_cookies.py python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload cd ~/IOA-NewSite/frontend zip -r ioa-frontend-build.zip build/ cd /home/sandhiya/IOA-NewSite/backend echo "STRIPE_SECRET_KEY=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" >> ~/IOA-NewSite/backend/.env cd /home/sandhiya/IOA-NewSite/frontend echo "REACT_APP_STRIPE_PUBLISHABLE_KEY=pk_live_51TLjXi7Cb7ioE7wR7UExO32MbCFIBYw6D97NmKy52PKGANA6SMCE4QyBCrTDM9mguloaj4q1MENbV8vvspmru1oy00alWWhdSf" >> ~/IOA-NewSite/frontend/.env cd /home/sandhiya/IOA-NewSite/backend clear sed -n '460,560p' 
~/IOA-NewSite/backend/server.py clear cat > /tmp/fix_stripe.py << 'EOF' with open('/home/sandhiya/IOA-NewSite/backend/server.py', 'r') as f: content = f.read() # Add stripe import after other imports content = content.replace( 'from openai import AsyncOpenAI', 'from openai import AsyncOpenAI\nimport stripe as stripe_lib' ) # Add stripe key setup after OPENAI_API_KEY line content = content.replace( 'OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")', 'OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")\nstripe_lib.api_key = os.environ.get("STRIPE_SECRET_KEY", "")' ) # Fix enroll endpoint - replace StripeCheckout usage old_enroll = ''' # Stripe temporarily disabled host_url = str(request.base_url) webhook_url = f"{host_url}api/webhook/stripe" stripe_checkout = StripeCheckout(api_key=STRIPE_API_KEY, webhook_url=webhook_url) checkout_req = CheckoutSessionRequest( amount=float(amount), currency="usd", success_url=success_url, cancel_url=cancel_url, metadata={"user_id": user["id"], "course_slug": slug, "user_email": user["email"]} ) session = await stripe_checkout.create_checkout_session(checkout_req)''' new_enroll = ''' import asyncio loop = asyncio.get_event_loop() session = await loop.run_in_executor(None, lambda: stripe_lib.checkout.Session.create( payment_method_types=["card"], line_items=[{ "price_data": { "currency": "usd", "product_data": {"name": f"IoA USA - {slug}"}, "unit_amount": int(float(amount) * 100), }, "quantity": 1, }], mode="payment", success_url=success_url, cancel_url=cancel_url, metadata={"user_id": user["id"], "course_slug": slug, "user_email": user["email"]} )) session.session_id = session.id''' content = content.replace(old_enroll, new_enroll) # Fix payment status endpoint old_status = ''' # Stripe temporarily disabled host_url = str(request.base_url) webhook_url = f"{host_url}api/webhook/stripe" stripe_checkout = StripeCheckout(api_key=STRIPE_API_KEY, webhook_url=webhook_url) status = await stripe_checkout.get_checkout_status(session_id) 
new_status = status.payment_status await db.payment_transactions.update_one( {"session_id": session_id}, {"$set": {"payment_status": new_status, "status": status.status, "updated_at": datetime.now(timezone.utc).isoformat()}} )''' new_status = ''' import asyncio loop = asyncio.get_event_loop() stripe_session = await loop.run_in_executor(None, lambda: stripe_lib.checkout.Session.retrieve(session_id)) new_status = stripe_session.payment_status stripe_status = stripe_session.status await db.payment_transactions.update_one( {"session_id": session_id}, {"$set": {"payment_status": new_status, "status": stripe_status, "updated_at": datetime.now(timezone.utc).isoformat()}} )''' content = content.replace(old_status, new_status) # Fix webhook endpoint old_webhook = ''' # Stripe temporarily disabled host_url = str(request.base_url) webhook_url = f"{host_url}api/webhook/stripe" stripe_checkout = StripeCheckout(api_key=STRIPE_API_KEY, webhook_url=webhook_url) try: webhook_response = await stripe_checkout.handle_webhook(body, sig) if webhook_response.payment_status == "paid": session_id = webhook_response.session_id''' new_webhook = ''' webhook_secret = os.environ.get("STRIPE_WEBHOOK_SECRET", "") try: if webhook_secret: event = stripe_lib.Webhook.construct_event(body, sig, webhook_secret) else: import json event = stripe_lib.Event.construct_from(json.loads(body), stripe_lib.api_key) if event.type == "checkout.session.completed": stripe_session = event.data.object payment_status = stripe_session.payment_status if payment_status == "paid": session_id = stripe_session.id''' content = content.replace(old_webhook, new_webhook) with open('/home/sandhiya/IOA-NewSite/backend/server.py', 'w') as f: f.write(content) print("Stripe fix done!") EOF python3 /tmp/fix_stripe.py clear grep -n "StripeCheckout\|CheckoutSessionRequest\|Stripe temporarily" ~/IOA-NewSite/backend/server.py cd ~/IOA-NewSite/frontend cat .env tailscale serve status lsof -i :3000 curl http://localhost:3000 clear grep 
"emergent" ~/IOA-NewSite/frontend/public/index.html clear grep -o "stripe\|Stripe" ~/IOA-NewSite/frontend/build/static/js/main.463c975b.js | head -20 grep -r "stripe\|Stripe" ~/IOA-NewSite/frontend/src/ --include="*.js" --include="*.jsx" cd ~/IOA-NewSite git diff --name-only git diff frontend/public/index.html kill -9 $(lsof -t -i:3000) cd ~/IOA-NewSite/frontend npm start cd ~/IOA-NewSite git checkout frontend/public/index.html npm start cd ~/IOA-NewSite/frontend npm start kill $(lsof -t -i:3000) cd ~/IOA-NewSite/frontend PORT=4000 npm start clear kill $(lsof -t -i:4000) PORT=4000 npm start clear cd ~/IOA-NewSite/frontend cat > /tmp/check_env.js << 'EOF' const fs = require('fs'); const content = fs.readFileSync('.env', 'utf8'); console.log('ENV file contents:'); console.log(content); EOF node /tmp/check_env.js clear cat > ~/IOA-NewSite/frontend/.env << 'EOF' REACT_APP_BACKEND_URL=http://192.168.1.157:8002 REACT_APP_STRIPE_PUBLISHABLE_KEY=pk_live_51TLjXi7Cb7ioE7wR7UExO32MbCFIBYw6D97NmKy52PKGANA6SMCE4QyBCrTDM9mguloaj4q1MENbV8vvspmru1oy00alWWhdSf EOF kill $(lsof -t -i:4000) PORT=4000 npm start clear PORT=4000 npm start clear PORT=4000 npm start clear PORT=4000 npm start clear PORT=4000 npm start clear PORT=4000 npm start clear kill $(lsof -t -i:4000) PORT=4000 npm start clear cd ~/IOA-NewSite/frontend kill $(lsof -t -i:4000) PORT=4000 npm start npm run build npx serve -s build -l 4000 clear cd ~/IOA-NewSite/frontend kill $(lsof -t -i:4000) npm run build npx serve -s build -l 4000 cd /home/sandhiya/IOA-NewSite/frontend kill $(lsof -t -i:4000) npm run build npx serve -s build -l 4000 cd /home/sandhiya/IOA-NewSite/backend python3 -m uvicorn server:app --host 0.0.0.0 --port 8002 --reload cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose down clear cd /home/sandhiya/Transcripts-Automated/Tr-1 docker-compose down cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose ps clear sudo systemctl start docker sudo systemctl 
status docker cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker-compose up -d clear docker compose up -d docker compose ps curl http://localhost:8001/health docker compose logs -f worker docker compose ps curl http://localhost:8001/health cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker compose ps cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker compose ps docker compose down docker compose build --no-cache docker compose up -d docker compose logs -f worker docker compose down docker compose build --no-cache docker compose up -d docker compose ps cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend pip install python-docx --break-system-packages docker compose down && docker compose up -d docker compose ps docker compose logs -f worker cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker compose logs -f worker cd /home/sandhiya/Transcripts-Automated/Tr-1/transcriptiq-backend docker compose logs worker | grep "Coding with framework"