The Wulf's Den

Gemma 3: no wait / Chatgpt-4o-Latest: no wait / GPT-4o Mini / 3.5 Turbo: no wait / GPT-4: no wait / GPT-4 Turbo: no wait / GPT-4o: no wait / Gemini Flash: no wait / Gemini Pro: no wait / Mistral 7B: no wait / Mistral Nemo: no wait / Mistral Medium: no wait / Mistral Large: no wait / Deepseek: no wait / OpenAI o3 mini: no wait

Welcome to The Wulf's Den

Small disclaimer: streaming does not currently work well with Gemini, so if you experience any issues/errors, please disable streaming. Context also seems to be somewhat broken on Gemini, so anything over 30k tokens of context may produce errors; there is no fix for this as of yet. Streaming should be perfectly fine with everything else.


Service Info

{
  "uptime": 594196,
  "endpoints": {
    "openai": "https://wulfs-den.ink/proxy/openai",
    "google-ai": "https://wulfs-den.ink/proxy/google-ai",
    "mistral-ai": "https://wulfs-den.ink/proxy/mistral-ai",
    "deepseek": "https://wulfs-den.ink/proxy/deepseek",
    "universal": "https://wulfs-den.ink/proxy"
  },
  "proompts": 78829,
  "tookens": "1.924b",
  "proomptsTotal": 1695781,
  "proomptersNow": 11,
  "tookensTotal": "31.119b",
  "openaiKeys": 21,
  "openaiOrgs": 21,
  "google-aiKeys": 901,
  "mistral-aiKeys": 3,
  "deepseekKeys": 80,
  "turbo": {
    "usage": "0 tokens",
    "activeKeys": 17,
    "revokedKeys": 1,
    "overQuotaKeys": 3,
    "trialKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4-turbo": {
    "usage": "308.1k tokens",
    "activeKeys": 17,
    "overQuotaKeys": 3,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4": {
    "usage": "0 tokens",
    "activeKeys": 17,
    "overQuotaKeys": 3,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "chatgpt-4o-latest": {
    "usage": "15.78m tokens",
    "activeKeys": 17,
    "overQuotaKeys": 3,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4o": {
    "usage": "3.00m tokens",
    "activeKeys": 17,
    "overQuotaKeys": 3,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "o3-mini": {
    "usage": "0 tokens",
    "activeKeys": 15,
    "overQuotaKeys": 2,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gemini-pro": {
    "usage": "1.809b tokens",
    "activeKeys": 901,
    "revokedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gemini-flash": {
    "usage": "13.82m tokens",
    "activeKeys": 901,
    "revokedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gemma-3": {
    "usage": "769.0k tokens",
    "activeKeys": 901,
    "revokedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "mistral-small": {
    "usage": "62.1k tokens",
    "activeKeys": 3,
    "revokedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "mistral-tiny": {
    "usage": "14.2k tokens",
    "activeKeys": 3,
    "revokedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "mistral-medium": {
    "usage": "0 tokens",
    "activeKeys": 3,
    "revokedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "mistral-large": {
    "usage": "2.66m tokens",
    "activeKeys": 3,
    "revokedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "deepseek": {
    "usage": "77.78m tokens",
    "activeKeys": 80,
    "revokedKeys": 0,
    "overQuotaKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "config": {
    "page_body": "200",
    "gatekeeper": "user_token",
    "captchaMode": "none",
    "powTokenPrompt": "100",
    "powTokenMaxIps": "2",
    "powDifficultyLevel": "low",
    "powChallengeTimeout": "30",
    "textModelRateLimit": "4",
    "imageModelRateLimit": "4",
    "maxContextTokensOpenAI": "72000",
    "maxContextTokensAnthropic": "90000",
    "maxOutputTokensOpenAI": "8192",
    "maxOutputTokensAnthropic": "8192",
    "maxOutputTokensGoogle": "16384",
    "universalEndpoint": "true",
    "rejectMessage": "This content violates /aicg/'s acceptable use policy.",
    "hashIp": "false",
    "allowAwsLogging": "false",
    "promptLogging": "false",
    "tokenQuota": {
      "chatgpt-4o-latest": "0",
      "turbo": "0",
      "gpt4": "0",
      "gpt4-32k": "0",
      "gpt4-turbo": "0",
      "gpt4o": "0",
      "gpt45": "0",
      "o1": "0",
      "o1-mini": "0",
      "o3-mini": "0",
      "dall-e": "0",
      "claude": "0",
      "claude-opus": "0",
      "gemma-3": "0",
      "gemini-flash": "0",
      "gemini-pro": "0",
      "gemini-ultra": "0",
      "mistral-tiny": "0",
      "mistral-small": "0",
      "mistral-medium": "0",
      "mistral-large": "0",
      "aws-claude": "0",
      "aws-claude-opus": "0",
      "aws-mistral-tiny": "0",
      "aws-mistral-small": "0",
      "aws-mistral-medium": "0",
      "aws-mistral-large": "0",
      "gcp-claude": "0",
      "gcp-claude-opus": "0",
      "azure-turbo": "0",
      "azure-gpt4": "0",
      "azure-gpt4-32k": "0",
      "azure-gpt4-turbo": "0",
      "azure-gpt4o": "0",
      "azure-gpt45": "0",
      "azure-dall-e": "0",
      "azure-o1": "0",
      "azure-o1-mini": "0",
      "azure-o3-mini": "0",
      "deepseek": "0"
    },
    "allowOpenAIToolUsage": "true",
    "allowedVisionServices": "google-ai,openai,mistral-ai",
    "tokensPunishmentFactor": "0"
  },
  "build": "aaf0ae5 (main@subzerowolf1/oai-reverse-proxy)"
}