mirror of
https://github.com/QuantumNous/new-api.git
synced 2026-03-30 05:02:17 +00:00
feat: add sync_fields operation
This commit is contained in:
@@ -34,7 +34,7 @@ type ConditionOperation struct {
|
||||
|
||||
type ParamOperation struct {
|
||||
Path string `json:"path"`
|
||||
Mode string `json:"mode"` // delete, set, move, copy, prepend, append, trim_prefix, trim_suffix, ensure_prefix, ensure_suffix, trim_space, to_lower, to_upper, replace, regex_replace, return_error, prune_objects, set_header, delete_header, copy_header, move_header
|
||||
Mode string `json:"mode"` // delete, set, move, copy, prepend, append, trim_prefix, trim_suffix, ensure_prefix, ensure_suffix, trim_space, to_lower, to_upper, replace, regex_replace, return_error, prune_objects, set_header, delete_header, copy_header, move_header, sync_fields
|
||||
Value interface{} `json:"value"`
|
||||
KeepOrigin bool `json:"keep_origin"`
|
||||
From string `json:"from,omitempty"`
|
||||
@@ -494,6 +494,11 @@ func applyOperations(jsonStr string, operations []ParamOperation, conditionConte
|
||||
if err == nil {
|
||||
contextJSON, err = marshalContextJSON(context)
|
||||
}
|
||||
case "sync_fields":
|
||||
result, err = syncFieldsBetweenTargets(result, context, op.From, op.To)
|
||||
if err == nil {
|
||||
contextJSON, err = marshalContextJSON(context)
|
||||
}
|
||||
default:
|
||||
return "", fmt.Errorf("unknown operation: %s", op.Mode)
|
||||
}
|
||||
@@ -673,6 +678,119 @@ func deleteHeaderOverrideInContext(context map[string]interface{}, headerName st
|
||||
return nil
|
||||
}
|
||||
|
||||
// syncTarget identifies one endpoint of a sync_fields operation: either a
// JSON body path (kind "json") or a request header name (kind "header").
type syncTarget struct {
	kind string
	key  string
}

// parseSyncTarget converts a "prefix:key" spec (e.g. "header:session_id",
// "json:prompt_cache_key") into a syncTarget. The prefixes "json" and "body"
// both map to a JSON path; a bare value without any prefix is treated as a
// JSON path for backward compatibility. Empty specs, empty keys, and unknown
// prefixes are rejected with an error.
func parseSyncTarget(spec string) (syncTarget, error) {
	trimmed := strings.TrimSpace(spec)
	if trimmed == "" {
		return syncTarget{}, fmt.Errorf("sync_fields target is required")
	}

	prefix, rest, found := strings.Cut(trimmed, ":")
	if !found {
		// No prefix present: interpret the whole spec as a JSON path.
		return syncTarget{kind: "json", key: trimmed}, nil
	}

	key := strings.TrimSpace(rest)
	if key == "" {
		return syncTarget{}, fmt.Errorf("sync_fields target key is required: %s", trimmed)
	}

	switch strings.ToLower(strings.TrimSpace(prefix)) {
	case "json", "body":
		return syncTarget{kind: "json", key: key}, nil
	case "header":
		return syncTarget{kind: "header", key: key}, nil
	default:
		return syncTarget{}, fmt.Errorf("sync_fields target prefix is invalid: %s", trimmed)
	}
}
|
||||
|
||||
func readSyncTargetValue(jsonStr string, context map[string]interface{}, target syncTarget) (interface{}, bool, error) {
|
||||
switch target.kind {
|
||||
case "json":
|
||||
path := processNegativeIndex(jsonStr, target.key)
|
||||
value := gjson.Get(jsonStr, path)
|
||||
if !value.Exists() || value.Type == gjson.Null {
|
||||
return nil, false, nil
|
||||
}
|
||||
if value.Type == gjson.String && strings.TrimSpace(value.String()) == "" {
|
||||
return nil, false, nil
|
||||
}
|
||||
return value.Value(), true, nil
|
||||
case "header":
|
||||
value, ok := getHeaderValueFromContext(context, target.key)
|
||||
if !ok || strings.TrimSpace(value) == "" {
|
||||
return nil, false, nil
|
||||
}
|
||||
return value, true, nil
|
||||
default:
|
||||
return nil, false, fmt.Errorf("unsupported sync_fields target kind: %s", target.kind)
|
||||
}
|
||||
}
|
||||
|
||||
func writeSyncTargetValue(jsonStr string, context map[string]interface{}, target syncTarget, value interface{}) (string, error) {
|
||||
switch target.kind {
|
||||
case "json":
|
||||
path := processNegativeIndex(jsonStr, target.key)
|
||||
nextJSON, err := sjson.Set(jsonStr, path, value)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return nextJSON, nil
|
||||
case "header":
|
||||
if err := setHeaderOverrideInContext(context, target.key, value, false); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return jsonStr, nil
|
||||
default:
|
||||
return "", fmt.Errorf("unsupported sync_fields target kind: %s", target.kind)
|
||||
}
|
||||
}
|
||||
|
||||
func syncFieldsBetweenTargets(jsonStr string, context map[string]interface{}, fromSpec string, toSpec string) (string, error) {
|
||||
fromTarget, err := parseSyncTarget(fromSpec)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
toTarget, err := parseSyncTarget(toSpec)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
fromValue, fromExists, err := readSyncTargetValue(jsonStr, context, fromTarget)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
toValue, toExists, err := readSyncTargetValue(jsonStr, context, toTarget)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
// If one side exists and the other side is missing, sync the missing side.
|
||||
if fromExists && !toExists {
|
||||
return writeSyncTargetValue(jsonStr, context, toTarget, fromValue)
|
||||
}
|
||||
if toExists && !fromExists {
|
||||
return writeSyncTargetValue(jsonStr, context, fromTarget, toValue)
|
||||
}
|
||||
return jsonStr, nil
|
||||
}
|
||||
|
||||
func ensureMapKeyInContext(context map[string]interface{}, key string) map[string]interface{} {
|
||||
if context == nil {
|
||||
return map[string]interface{}{}
|
||||
|
||||
@@ -1057,6 +1057,113 @@ func TestApplyParamOverrideCopyHeaderFromRequestHeaders(t *testing.T) {
|
||||
assertJSONEqual(t, `{"temperature":0.1}`, string(out))
|
||||
}
|
||||
|
||||
func TestApplyParamOverrideSyncFieldsHeaderToJSON(t *testing.T) {
|
||||
input := []byte(`{"model":"gpt-4"}`)
|
||||
override := map[string]interface{}{
|
||||
"operations": []interface{}{
|
||||
map[string]interface{}{
|
||||
"mode": "sync_fields",
|
||||
"from": "header:session_id",
|
||||
"to": "json:prompt_cache_key",
|
||||
},
|
||||
},
|
||||
}
|
||||
ctx := map[string]interface{}{
|
||||
"request_headers_raw": map[string]interface{}{
|
||||
"session_id": "sess-123",
|
||||
},
|
||||
"request_headers": map[string]interface{}{
|
||||
"session_id": "sess-123",
|
||||
},
|
||||
}
|
||||
|
||||
out, err := ApplyParamOverride(input, override, ctx)
|
||||
if err != nil {
|
||||
t.Fatalf("ApplyParamOverride returned error: %v", err)
|
||||
}
|
||||
assertJSONEqual(t, `{"model":"gpt-4","prompt_cache_key":"sess-123"}`, string(out))
|
||||
}
|
||||
|
||||
func TestApplyParamOverrideSyncFieldsJSONToHeader(t *testing.T) {
|
||||
input := []byte(`{"model":"gpt-4","prompt_cache_key":"cache-abc"}`)
|
||||
override := map[string]interface{}{
|
||||
"operations": []interface{}{
|
||||
map[string]interface{}{
|
||||
"mode": "sync_fields",
|
||||
"from": "header:session_id",
|
||||
"to": "json:prompt_cache_key",
|
||||
},
|
||||
},
|
||||
}
|
||||
ctx := map[string]interface{}{}
|
||||
|
||||
out, err := ApplyParamOverride(input, override, ctx)
|
||||
if err != nil {
|
||||
t.Fatalf("ApplyParamOverride returned error: %v", err)
|
||||
}
|
||||
assertJSONEqual(t, `{"model":"gpt-4","prompt_cache_key":"cache-abc"}`, string(out))
|
||||
|
||||
headers, ok := ctx["header_override"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("expected header_override context map")
|
||||
}
|
||||
if headers["session_id"] != "cache-abc" {
|
||||
t.Fatalf("expected session_id to be synced from prompt_cache_key, got: %v", headers["session_id"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestApplyParamOverrideSyncFieldsNoChangeWhenBothExist(t *testing.T) {
|
||||
input := []byte(`{"model":"gpt-4","prompt_cache_key":"cache-body"}`)
|
||||
override := map[string]interface{}{
|
||||
"operations": []interface{}{
|
||||
map[string]interface{}{
|
||||
"mode": "sync_fields",
|
||||
"from": "header:session_id",
|
||||
"to": "json:prompt_cache_key",
|
||||
},
|
||||
},
|
||||
}
|
||||
ctx := map[string]interface{}{
|
||||
"request_headers_raw": map[string]interface{}{
|
||||
"session_id": "cache-header",
|
||||
},
|
||||
"request_headers": map[string]interface{}{
|
||||
"session_id": "cache-header",
|
||||
},
|
||||
}
|
||||
|
||||
out, err := ApplyParamOverride(input, override, ctx)
|
||||
if err != nil {
|
||||
t.Fatalf("ApplyParamOverride returned error: %v", err)
|
||||
}
|
||||
assertJSONEqual(t, `{"model":"gpt-4","prompt_cache_key":"cache-body"}`, string(out))
|
||||
|
||||
headers, _ := ctx["header_override"].(map[string]interface{})
|
||||
if headers != nil {
|
||||
if _, exists := headers["session_id"]; exists {
|
||||
t.Fatalf("expected no override when both sides already have value")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestApplyParamOverrideSyncFieldsInvalidTarget(t *testing.T) {
|
||||
input := []byte(`{"model":"gpt-4"}`)
|
||||
override := map[string]interface{}{
|
||||
"operations": []interface{}{
|
||||
map[string]interface{}{
|
||||
"mode": "sync_fields",
|
||||
"from": "foo:session_id",
|
||||
"to": "json:prompt_cache_key",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
_, err := ApplyParamOverride(input, override, nil)
|
||||
if err == nil {
|
||||
t.Fatalf("expected error, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestApplyParamOverrideSetHeaderKeepOrigin(t *testing.T) {
|
||||
input := []byte(`{"temperature":0.7}`)
|
||||
override := map[string]interface{}{
|
||||
|
||||
@@ -58,6 +58,7 @@ const OPERATION_MODE_OPTIONS = [
|
||||
{ label: 'JSON · to_upper', value: 'to_upper' },
|
||||
{ label: 'Control · return_error', value: 'return_error' },
|
||||
{ label: 'Control · prune_objects', value: 'prune_objects' },
|
||||
{ label: 'Control · sync_fields', value: 'sync_fields' },
|
||||
{ label: 'Header · set_header', value: 'set_header' },
|
||||
{ label: 'Header · delete_header', value: 'delete_header' },
|
||||
{ label: 'Header · copy_header', value: 'copy_header' },
|
||||
@@ -101,6 +102,7 @@ const MODE_META = {
|
||||
to_upper: { path: true },
|
||||
return_error: { value: true },
|
||||
prune_objects: { pathOptional: true, value: true },
|
||||
sync_fields: { from: true, to: true },
|
||||
set_header: { path: true, value: true, keepOrigin: true },
|
||||
delete_header: { path: true },
|
||||
copy_header: { from: true, to: true, keepOrigin: true, pathAlias: true },
|
||||
@@ -124,9 +126,16 @@ const FROM_REQUIRED_MODES = new Set([
|
||||
'regex_replace',
|
||||
'copy_header',
|
||||
'move_header',
|
||||
'sync_fields',
|
||||
]);
|
||||
|
||||
const TO_REQUIRED_MODES = new Set(['copy', 'move', 'copy_header', 'move_header']);
|
||||
const TO_REQUIRED_MODES = new Set([
|
||||
'copy',
|
||||
'move',
|
||||
'copy_header',
|
||||
'move_header',
|
||||
'sync_fields',
|
||||
]);
|
||||
|
||||
const MODE_DESCRIPTIONS = {
|
||||
set: 'Set JSON value at path',
|
||||
@@ -146,12 +155,18 @@ const MODE_DESCRIPTIONS = {
|
||||
to_upper: 'Convert string to upper case',
|
||||
return_error: 'Stop processing and return custom error',
|
||||
prune_objects: 'Remove objects matching conditions',
|
||||
sync_fields: 'Sync two fields when one exists and the other is missing',
|
||||
set_header: 'Set runtime override header',
|
||||
delete_header: 'Delete runtime override header',
|
||||
copy_header: 'Copy header from from -> to',
|
||||
move_header: 'Move header from from -> to',
|
||||
};
|
||||
|
||||
// Selector options for a sync_fields endpoint type: a JSON body path or a request header.
const SYNC_TARGET_TYPE_OPTIONS = [
  { label: 'JSON', value: 'json' },
  { label: 'Header', value: 'header' },
];
|
||||
|
||||
const OPERATION_PATH_SUGGESTIONS = [
|
||||
'model',
|
||||
'temperature',
|
||||
@@ -353,6 +368,13 @@ const PARAM_OVERRIDE_JSON_SCHEMA = {
|
||||
},
|
||||
then: { required: ['value'] },
|
||||
},
|
||||
{
|
||||
if: {
|
||||
properties: { mode: { const: 'sync_fields' } },
|
||||
required: ['mode'],
|
||||
},
|
||||
then: { required: ['from', 'to'] },
|
||||
},
|
||||
{
|
||||
if: {
|
||||
properties: { mode: { const: 'set_header' } },
|
||||
@@ -415,6 +437,26 @@ const parseLooseValue = (valueText) => {
|
||||
}
|
||||
};
|
||||
|
||||
// Parse a "prefix:key" sync target spec into { type, key }. Anything without a
// "header" prefix — including bare values with no colon — is treated as a JSON
// path; null/undefined specs yield an empty JSON target.
const parseSyncTargetSpec = (spec) => {
  const text = String(spec ?? '').trim();
  if (!text) {
    return { type: 'json', key: '' };
  }
  const sep = text.indexOf(':');
  if (sep < 0) {
    return { type: 'json', key: text };
  }
  const kind = text.slice(0, sep).trim().toLowerCase();
  const key = text.slice(sep + 1).trim();
  return { type: kind === 'header' ? 'header' : 'json', key };
};
|
||||
|
||||
// Build the canonical "type:key" spec string. Unknown types fall back to
// "json"; a blank key produces an empty spec (meaning "unset").
const buildSyncTargetSpec = (type, key) => {
  const cleanKey = String(key ?? '').trim();
  if (cleanKey === '') {
    return '';
  }
  const kind = type === 'header' ? 'header' : 'json';
  return `${kind}:${cleanKey}`;
};
|
||||
|
||||
const normalizeCondition = (condition = {}) => ({
|
||||
id: nextLocalId(),
|
||||
path: typeof condition.path === 'string' ? condition.path : '',
|
||||
@@ -1028,6 +1070,14 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
|
||||
const mode = operation.mode || 'set';
|
||||
const meta = MODE_META[mode] || MODE_META.set;
|
||||
const conditions = operation.conditions || [];
|
||||
const syncFromTarget =
|
||||
mode === 'sync_fields'
|
||||
? parseSyncTargetSpec(operation.from)
|
||||
: null;
|
||||
const syncToTarget =
|
||||
mode === 'sync_fields'
|
||||
? parseSyncTargetSpec(operation.to)
|
||||
: null;
|
||||
return (
|
||||
<Card key={operation.id} className='!rounded-xl border'>
|
||||
<div className='flex items-center justify-between mb-2'>
|
||||
@@ -1146,7 +1196,107 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
|
||||
</div>
|
||||
) : null}
|
||||
|
||||
{meta.from || meta.to === false || meta.to ? (
|
||||
{mode === 'sync_fields' ? (
|
||||
<div className='mt-2'>
|
||||
<Text type='tertiary' size='small'>
|
||||
sync endpoints
|
||||
</Text>
|
||||
<Row gutter={12} style={{ marginTop: 6 }}>
|
||||
<Col xs={24} md={12}>
|
||||
<Text type='tertiary' size='small'>
|
||||
from endpoint
|
||||
</Text>
|
||||
<div className='flex gap-2'>
|
||||
<Select
|
||||
value={syncFromTarget?.type || 'json'}
|
||||
optionList={SYNC_TARGET_TYPE_OPTIONS}
|
||||
style={{ width: 120 }}
|
||||
onChange={(nextType) =>
|
||||
updateOperation(operation.id, {
|
||||
from: buildSyncTargetSpec(
|
||||
nextType,
|
||||
syncFromTarget?.key || '',
|
||||
),
|
||||
})
|
||||
}
|
||||
/>
|
||||
<Input
|
||||
value={syncFromTarget?.key || ''}
|
||||
placeholder='session_id'
|
||||
onChange={(nextKey) =>
|
||||
updateOperation(operation.id, {
|
||||
from: buildSyncTargetSpec(
|
||||
syncFromTarget?.type || 'json',
|
||||
nextKey,
|
||||
),
|
||||
})
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
</Col>
|
||||
<Col xs={24} md={12}>
|
||||
<Text type='tertiary' size='small'>
|
||||
to endpoint
|
||||
</Text>
|
||||
<div className='flex gap-2'>
|
||||
<Select
|
||||
value={syncToTarget?.type || 'json'}
|
||||
optionList={SYNC_TARGET_TYPE_OPTIONS}
|
||||
style={{ width: 120 }}
|
||||
onChange={(nextType) =>
|
||||
updateOperation(operation.id, {
|
||||
to: buildSyncTargetSpec(
|
||||
nextType,
|
||||
syncToTarget?.key || '',
|
||||
),
|
||||
})
|
||||
}
|
||||
/>
|
||||
<Input
|
||||
value={syncToTarget?.key || ''}
|
||||
placeholder='prompt_cache_key'
|
||||
onChange={(nextKey) =>
|
||||
updateOperation(operation.id, {
|
||||
to: buildSyncTargetSpec(
|
||||
syncToTarget?.type || 'json',
|
||||
nextKey,
|
||||
),
|
||||
})
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
</Col>
|
||||
</Row>
|
||||
<Space wrap style={{ marginTop: 8 }}>
|
||||
<Tag
|
||||
size='small'
|
||||
color='cyan'
|
||||
className='cursor-pointer'
|
||||
onClick={() =>
|
||||
updateOperation(operation.id, {
|
||||
from: 'header:session_id',
|
||||
to: 'json:prompt_cache_key',
|
||||
})
|
||||
}
|
||||
>
|
||||
{'header:session_id -> json:prompt_cache_key'}
|
||||
</Tag>
|
||||
<Tag
|
||||
size='small'
|
||||
color='cyan'
|
||||
className='cursor-pointer'
|
||||
onClick={() =>
|
||||
updateOperation(operation.id, {
|
||||
from: 'json:prompt_cache_key',
|
||||
to: 'header:session_id',
|
||||
})
|
||||
}
|
||||
>
|
||||
{'json:prompt_cache_key -> header:session_id'}
|
||||
</Tag>
|
||||
</Space>
|
||||
</div>
|
||||
) : meta.from || meta.to === false || meta.to ? (
|
||||
<Row gutter={12} style={{ marginTop: 8 }}>
|
||||
{meta.from || meta.to === false ? (
|
||||
<Col xs={24} md={12}>
|
||||
|
||||
Reference in New Issue
Block a user