"VertexAI", SpanAttributes.LLM_REQUEST_TYPE: LLMRequestTypeValues.COMPLETION.value, }, ) _handle_request(span, args, kwargs, llm_model) response = wrapped(*args, **kwargs) if response: if is_streaming_response(response): return _build_from_streaming_response(span, response, llm_model) elif is_async_streaming_response(response): return _abuild_from_streaming_response(span, response, llm_model) else: _handle_response(span, response, llm_model) span.end() return response https://github.com/traceloop/openllmetry/blob/7a1b8bbea7fcffb2f7cbd127fada7b6c7b046280/pack ages/opentelemetry-instrumentation-vertexai/opentelemetry/instrumentation/vertexai/__init__.py# L322-L344 入力の情報をスパン属性に追加 出力の情報をスパン属性に追加、など