fix: enforced dotenv config

rafaelsideguide 2024-09-04 15:57:57 -03:00
parent 3f462eabe9
commit cb8571abad
16 changed files with 33 additions and 4 deletions
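The change is the same across all 16 files: the default-import form (import dotenv from "dotenv"; dotenv.config();) and the side-effect form (import "dotenv/config";) are replaced with the named configDotenv export, called at the top of every module that reads process.env. A minimal sketch of the resulting pattern, assuming a dotenv version that exports configDotenv (an alias of config, available in recent v16 releases); the flag name is borrowed from the logCrawl hunk below:

import { configDotenv } from "dotenv";

// Load .env before any process.env read below this line.
configDotenv();

// Example consumer (same flag as in the logCrawl change below).
const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true';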

View File

@@ -1,11 +1,11 @@
 import request from "supertest";
-import dotenv from "dotenv";
+import { configDotenv } from "dotenv";
 import {
   ScrapeRequest,
   ScrapeResponseRequestTest,
 } from "../../controllers/v1/types";
 
-dotenv.config();
+configDotenv();
 const TEST_URL = "http://127.0.0.1:3002";
 
 describe("E2E Tests for v1 API Routes", () => {

View File

@@ -5,6 +5,8 @@ import { supabase_service } from "../../../src/services/supabase";
 import { Logger } from "../../../src/lib/logger";
 import { getCrawl, saveCrawl } from "../../../src/lib/crawl-redis";
 import * as Sentry from "@sentry/node";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 export async function crawlCancelController(req: Request, res: Response) {
   try {

View File

@@ -6,6 +6,8 @@ import { Logger } from "../../../src/lib/logger";
 import { getCrawl, getCrawlJobs } from "../../../src/lib/crawl-redis";
 import { supabaseGetJobsById } from "../../../src/lib/supabase-jobs";
 import * as Sentry from "@sentry/node";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 export async function getJobs(ids: string[]) {
   const jobs = (await Promise.all(ids.map(x => getScrapeQueue().getJob(x)))).filter(x => x);

View File

@@ -5,6 +5,8 @@ import { supabase_service } from "../../services/supabase";
 import { Logger } from "../../lib/logger";
 import { getCrawl, saveCrawl } from "../../lib/crawl-redis";
 import * as Sentry from "@sentry/node";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 export async function crawlCancelController(req: Request, res: Response) {
   try {

View File

@@ -3,6 +3,8 @@ import { CrawlStatusParams, CrawlStatusResponse, ErrorResponse, legacyDocumentCo
 import { getCrawl, getCrawlExpiry, getCrawlJobs, getDoneJobsOrdered, getDoneJobsOrderedLength } from "../../lib/crawl-redis";
 import { getScrapeQueue } from "../../services/queue-service";
 import { supabaseGetJobById, supabaseGetJobsById } from "../../lib/supabase-jobs";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 export async function getJob(id: string) {
   const job = await getScrapeQueue().getJob(id);

View File

@@ -1,3 +1,6 @@
+import { configDotenv } from "dotenv";
+configDotenv();
+
 enum LogLevel {
   NONE = 'NONE', // No logs will be output.
   ERROR = 'ERROR', // For logging error messages that indicate a failure in a specific operation.

View File

@@ -2,6 +2,8 @@ import { Job } from "bullmq";
 import type { baseScrapers } from "../scraper/WebScraper/single_url";
 import { supabase_service as supabase } from "../services/supabase";
 import { Logger } from "./logger";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 export type ScrapeErrorEvent = {
   type: "error",

View File

@@ -1,6 +1,8 @@
 import { AuthResponse } from "../../src/types";
 import { Logger } from "./logger";
 import * as Sentry from "@sentry/node";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 let warningCount = 0;
 

View File

@@ -12,6 +12,8 @@ import { Document } from "../lib/entities";
 import { supabase_service } from "../services/supabase";
 import { Logger } from "../lib/logger";
 import { ScrapeEvents } from "../lib/scrape-events";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 export async function startWebScraperPipeline({
   job,

View File

@@ -1,6 +1,7 @@
 import { supabase_service } from "../supabase";
 import { Logger } from "../../../src/lib/logger";
-import "dotenv/config";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 export async function logCrawl(job_id: string, team_id: string) {
   const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true';

View File

@@ -4,6 +4,8 @@ import { FirecrawlJob } from "../../types";
 import { posthog } from "../posthog";
 import "dotenv/config";
 import { Logger } from "../../lib/logger";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 export async function logJob(job: FirecrawlJob) {
   try {

View File

@@ -3,6 +3,8 @@ import { ScrapeLog } from "../../types";
 import { supabase_service } from "../supabase";
 import { PageOptions } from "../../lib/entities";
 import { Logger } from "../../lib/logger";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 export async function logScrape(
   scrapeLog: ScrapeLog,

View File

@@ -36,6 +36,8 @@ import {
 } from "../../src/lib/job-priority";
 import { PlanType } from "../types";
 import { getJobs } from "../../src/controllers/v1/crawl-status";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 if (process.env.ENV === "production") {
   initSDK({

View File

@ -1,5 +1,7 @@
import { createClient, SupabaseClient } from "@supabase/supabase-js";
import { Logger } from "../lib/logger";
import { configDotenv } from "dotenv";
configDotenv();
// SupabaseService class initializes the Supabase client conditionally based on environment variables.
class SupabaseService {

View File

@@ -3,6 +3,8 @@ import { legacyDocumentConverter } from "../../src/controllers/v1/types";
 import { Logger } from "../../src/lib/logger";
 import { supabase_service } from "./supabase";
 import { WebhookEventType } from "../types";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 export const callWebhook = async (
   teamId: string,

View File

@@ -1,5 +1,6 @@
 import { createClient, SupabaseClient } from "@supabase/supabase-js";
-import "dotenv/config";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 // SupabaseService class initializes the Supabase client conditionally based on environment variables.
 class SupabaseService {
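Because each of these files now calls configDotenv() at module load, the call can run many times per process. That is safe: Node evaluates each module only once, and by default dotenv never overwrites a variable that is already set in process.env, so the first load wins. A small sketch of that default behavior, assuming dotenv v16+, where configDotenv accepts the same options object as config:

import { configDotenv } from "dotenv";

process.env.GREETING = "from-shell"; // already set before .env is read
configDotenv();                      // keeps "from-shell"; the .env value is ignored
configDotenv({ override: true });    // only an explicit override replaces it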