File size: 1,611 Bytes
15a5288
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
import { BaseProviderFetcher } from './base';
import { ProviderEntry, TogetherModel } from './types';

/**
 * Fetches the model catalog from the Together AI API and normalizes each
 * raw model record into the provider-agnostic `ProviderEntry` shape.
 */
export class TogetherFetcher extends BaseProviderFetcher {
  name = 'together';

  constructor(apiKey?: string) {
    super('https://api.together.ai', apiKey, {
      requestsPerMinute: 600  // Together rate limit from spec
    });
  }

  /**
   * Retrieves all models from Together's `/v1/models` endpoint.
   *
   * @returns Normalized provider entries, or an empty array on any fetch
   *          failure (best-effort: this method never rejects).
   */
  async fetchModels(): Promise<ProviderEntry[]> {
    try {
      const response = await this.fetchWithRetry<TogetherModel[]>(
        `${this.baseUrl}/v1/models`
      );

      return response.map(model => this.mapModelToProviderEntry(model));
    } catch (error: unknown) {
      // Log the error object itself (second argument) so the stack trace
      // survives instead of being flattened by string interpolation,
      // which would print `[object Object]` for non-Error values.
      console.error('Failed to fetch Together models:', error);
      return [];
    }
  }

  /** Maps one raw Together model record onto the shared ProviderEntry shape. */
  private mapModelToProviderEntry(model: TogetherModel): ProviderEntry {
    const entry: ProviderEntry = {
      provider: this.name,
      context_length: model.context_length,
      // Together quotes prices per million tokens (hence 'per_million');
      // normalizePricing converts them to the project-wide unit.
      pricing: this.normalizePricing(
        model.pricing.input,
        model.pricing.output,
        'per_million'
      ),
      owned_by: model.organization,
      model_type: model.type
    };

    // Parse supported parameters from config if available
    if (model.config) {
      const configParams = this.parseConfigParameters(model.config);
      Object.assign(entry, configParams);
    }

    return entry;
  }

  /**
   * Derives capability flags from a model's `config` block.
   * Currently only detects stop-sequence support.
   */
  private parseConfigParameters(config: TogetherModel['config']): Partial<ProviderEntry> {
    const result: Partial<ProviderEntry> = {};

    // A non-empty default stop list implies the model supports stop sequences.
    if (config.stop && config.stop.length > 0) {
      result.supports_stop_sequences = true;
    }

    return result;
  }
}