PACS System 0.1.0
PACS DICOM system library
ai_service_connector::impl Class Reference

Public Member Functions

 impl ()=default
 
 ~impl ()
 
auto initialize (const ai_service_config &config) -> Result< std::monostate >
 
void shutdown ()
 
bool is_initialized () const noexcept
 
auto request_inference (const inference_request &request) -> Result< std::string >
 
auto check_status (const std::string &job_id) -> Result< inference_status >
 
auto cancel (const std::string &job_id) -> Result< std::monostate >
 
auto wait_for_completion (const std::string &job_id, std::chrono::milliseconds timeout, status_callback callback) -> Result< inference_status >
 
auto list_active_jobs () -> Result< std::vector< inference_status > >
 
auto list_models () -> Result< std::vector< model_info > >
 
auto get_model_info (const std::string &model_id) -> Result< model_info >
 
bool check_health ()
 
auto get_latency () -> std::optional< std::chrono::milliseconds >
 
const ai_service_config & get_config () const
 
auto update_credentials (authentication_type auth_type, const std::string &credentials) -> Result< std::monostate >
 

Private Attributes

std::mutex mutex_
 
std::atomic< bool > initialized_ {false}
 
ai_service_config config_
 
std::unique_ptr< http_client > http_client_
 
std::unique_ptr< job_tracker > job_tracker_
 

Detailed Description

Definition at line 899 of file ai_service_connector.cpp.
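
This class appears to be the private implementation (pimpl) behind ai_service_connector, owning the HTTP client, job tracker, and configuration. A minimal lifecycle sketch, assuming the enclosing ai_service_connector forwards to these methods; the endpoint, UID, and model values below are hypothetical:

    ai_service_config config;
    config.base_url = "https://ai.example.org";        // hypothetical endpoint
    config.auth_type = authentication_type::api_key;
    config.api_key = "my-api-key";                     // hypothetical credential

    ai_service_connector connector;
    if (auto init = connector.initialize(config); init.is_err()) {
        // inspect init.error() and abort
    }

    inference_request request;
    request.study_instance_uid = "1.2.3.4.5";          // hypothetical Study Instance UID
    request.model_id = "example-model";                // hypothetical model ID

    auto job = connector.request_inference(request);
    if (!job.is_err()) {
        auto final_status = connector.wait_for_completion(
            job.value(), std::chrono::minutes(5), {}); // no progress callback
    }
    connector.shutdown();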

Constructor & Destructor Documentation

◆ impl()

ai_service_connector::impl::impl ( )
default

◆ ~impl()

ai_service_connector::impl::~impl ( )
inline

Definition at line 903 of file ai_service_connector.cpp.

903 {
904 shutdown();
905 }

Member Function Documentation

◆ cancel()

auto ai_service_connector::impl::cancel ( const std::string & job_id) -> Result<std::monostate>
inline

Definition at line 1055 of file ai_service_connector.cpp.

1055 {
1056 if (!initialized_) {
1057 return error_info(-1, "AI service connector not initialized", "ai_service_connector");
1058 }
1059
1060 auto response = http_client_->del("/inference/" + job_id);
1061 if (response.is_err()) {
1062 return response.error();
1063 }
1064
1065 if (!response.value().is_success() && response.value().status_code != 404) {
1066 return error_info(
1067 response.value().status_code,
1068 "Failed to cancel job",
1069 "ai_service_connector");
1070 }
1071
1072 // Update local status
1073 inference_status status;
1074 status.job_id = job_id;
1075 status.status = inference_status_code::cancelled;
1076 status.message = "Job cancelled by user";
1077 status.completed_at = std::chrono::system_clock::now();
1078 job_tracker_->update_status(job_id, status);
1079
1080 kcenon::pacs::integration::logger_adapter::info("Inference job cancelled: {}", job_id);
1081
1082 return std::monostate{};
1083 }

References kcenon::pacs::integration::logger_adapter::info().

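
A short usage sketch, assuming connector and job_id come from the lifecycle sketch above. Note that a 404 from the service is treated as success, since the job is already gone on the remote side:

    auto cancelled = connector.cancel(job_id);
    if (cancelled.is_err()) {
        // service unreachable, or a non-404 HTTP failure
    }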

◆ check_health()

bool ai_service_connector::impl::check_health ( )
inline nodiscard

Definition at line 1193 of file ai_service_connector.cpp.

1193 {
1194 if (!initialized_ || !http_client_) {
1195 return false;
1196 }
1197 return http_client_->check_connectivity();
1198 }

◆ check_status()

auto ai_service_connector::impl::check_status ( const std::string & job_id) -> Result<inference_status>
inline

Definition at line 1014 of file ai_service_connector.cpp.

1014 {
1015 if (!initialized_) {
1016 return error_info(-1, "AI service connector not initialized", "ai_service_connector");
1017 }
1018
1019 // Check local cache first
1020 auto cached = job_tracker_->get_status(job_id);
1021 if (cached && (cached->status == inference_status_code::completed ||
1022 cached->status == inference_status_code::failed ||
1023 cached->status == inference_status_code::cancelled)) {
1024 return *cached;
1025 }
1026
1027 // Query remote service
1028 auto response = http_client_->get("/inference/" + job_id);
1029 if (response.is_err()) {
1030 return response.error();
1031 }
1032
1033 if (!response.value().is_success()) {
1034 if (response.value().status_code == 404) {
1035 return error_info(404, "Job not found: " + job_id, "ai_service_connector");
1036 }
1037 return error_info(
1038 response.value().status_code,
1039 "Failed to get job status",
1040 "ai_service_connector");
1041 }
1042
1043 // Parse status
1044 auto status = json_util::parse_status_json(response.value().body);
1045 if (!status) {
1046 return error_info(-1, "Failed to parse status response", "ai_service_connector");
1047 }
1048
1049 // Update local cache
1050 job_tracker_->update_status(job_id, *status);
1051
1052 return *status;
1053 }
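
A one-off status probe as a sketch, assuming connector and job_id from the lifecycle sketch above; terminal results (completed, failed, cancelled) are served from the local cache without a network round trip:

    auto status = connector.check_status(job_id);
    if (!status.is_err() &&
        status.value().status == inference_status_code::completed) {
        // job finished; proceed to fetch or process results
    }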

◆ get_config()

const ai_service_config & ai_service_connector::impl::get_config ( ) const
inline nodiscard

Definition at line 1207 of file ai_service_connector.cpp.

1207 {
1208 return config_;
1209 }

◆ get_latency()

auto ai_service_connector::impl::get_latency ( ) -> std::optional<std::chrono::milliseconds>
inline nodiscard

Definition at line 1200 of file ai_service_connector.cpp.

1200 {
1201 if (!initialized_ || !http_client_) {
1202 return std::nullopt;
1203 }
1204 return http_client_->measure_latency();
1205 }
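
A sketch combining the health and latency probes, e.g. for a periodic watchdog, assuming connector from the lifecycle sketch above:

    if (connector.check_health()) {
        if (auto latency = connector.get_latency()) {
            // latency->count() gives the round-trip time in milliseconds
        }
    } else {
        // service unreachable, or the connector is not initialized
    }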

◆ get_model_info()

auto ai_service_connector::impl::get_model_info ( const std::string & model_id) -> Result<model_info>
inline

Definition at line 1168 of file ai_service_connector.cpp.

1168 {
1169 if (!initialized_) {
1170 return error_info(-1, "AI service connector not initialized", "ai_service_connector");
1171 }
1172
1173 auto response = http_client_->get("/models/" + model_id);
1174 if (response.is_err()) {
1175 return response.error();
1176 }
1177
1178 if (!response.value().is_success()) {
1179 return error_info(
1180 response.value().status_code,
1181 "Failed to get model info",
1182 "ai_service_connector");
1183 }
1184
1185 auto info = json_util::parse_model_json(response.value().body);
1186 if (!info) {
1187 return error_info(-1, "Failed to parse model info", "ai_service_connector");
1188 }
1189
1190 return *info;
1191 }

◆ initialize()

auto ai_service_connector::impl::initialize ( const ai_service_config & config) -> Result<std::monostate>
inline

Definition at line 907 of file ai_service_connector.cpp.

907 {
908 std::lock_guard lock(mutex_);
909
910 if (initialized_) {
911 return std::monostate{};
912 }
913
914 config_ = config;
915
916 // Validate configuration
917 if (config.base_url.empty()) {
918 return error_info(-1, "Base URL is required", "ai_service_connector");
919 }
920
921 // Initialize HTTP client
922 http_client_ = std::make_unique<http_client>(config);
923
924 // Test connectivity
925 if (!http_client_->check_connectivity()) {
926 kcenon::pacs::integration::logger_adapter::warn(
927 "AI service at {} is not responding, continuing anyway",
928 config.base_url);
929 }
930
931 // Initialize job tracker
932 job_tracker_ = std::make_unique<job_tracker>();
933
934 // Initialize metrics
936
937 kcenon::pacs::integration::logger_adapter::info(
938 "AI service connector initialized: url={}, auth={}",
939 config.base_url, to_string(config.auth_type));
940
941 initialized_ = true;
942 return std::monostate{};
943 }

References kcenon::pacs::integration::logger_adapter::info(), kcenon::pacs::integration::monitoring_adapter::set_gauge(), and kcenon::pacs::integration::logger_adapter::warn().

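
A configuration sketch covering each supported authentication_type; the field names match those used by update_credentials() below, and all values are hypothetical:

    ai_service_config config;
    config.base_url = "https://ai.example.org";   // required; an empty URL is rejected

    // Pick exactly one of the following:
    config.auth_type = authentication_type::none;

    config.auth_type = authentication_type::api_key;
    config.api_key = "my-api-key";

    config.auth_type = authentication_type::bearer_token;
    config.bearer_token = "my-bearer-token";

    config.auth_type = authentication_type::basic;
    config.username = "alice";
    config.password = "s3cret";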

◆ is_initialized()

bool ai_service_connector::impl::is_initialized ( ) const
inline nodiscard noexcept

Definition at line 959 of file ai_service_connector.cpp.

959 {
960 return initialized_.load();
961 }

◆ list_active_jobs()

auto ai_service_connector::impl::list_active_jobs ( ) -> Result<std::vector<inference_status>>
inline

Definition at line 1128 of file ai_service_connector.cpp.

1128 {
1129 if (!initialized_) {
1130 return error_info(-1, "AI service connector not initialized", "ai_service_connector");
1131 }
1132
1133 return job_tracker_->get_active_jobs();
1134 }

◆ list_models()

auto ai_service_connector::impl::list_models ( ) -> Result<std::vector<model_info>>
inline

Definition at line 1136 of file ai_service_connector.cpp.

1136 {
1137 if (!initialized_) {
1138 return error_info(-1, "AI service connector not initialized", "ai_service_connector");
1139 }
1140
1141 auto response = http_client_->get("/models");
1142 if (response.is_err()) {
1143 return response.error();
1144 }
1145
1146 if (!response.value().is_success()) {
1147 return error_info(
1148 response.value().status_code,
1149 "Failed to list models",
1150 "ai_service_connector");
1151 }
1152
1153 // Parse models array from JSON response
1154 std::vector<model_info> models;
1155 auto model_objects = json_util::extract_json_array_objects(
1156 response.value().body);
1157
1158 for (const auto& obj : model_objects) {
1159 auto info = json_util::parse_model_json(obj);
1160 if (info) {
1161 models.push_back(std::move(*info));
1162 }
1163 }
1164
1165 return models;
1166 }
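
A discovery sketch chaining list_models() and get_model_info(), assuming connector from the lifecycle sketch above; the model_id field on model_info is assumed:

    auto models = connector.list_models();
    if (!models.is_err()) {
        for (const auto& m : models.value()) {
            auto detail = connector.get_model_info(m.model_id);  // field name assumed
            // inspect detail.value() when the lookup succeeds
        }
    }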

◆ request_inference()

auto ai_service_connector::impl::request_inference ( const inference_request & request) -> Result<std::string>
inline

Definition at line 963 of file ai_service_connector.cpp.

963 {
964 if (!initialized_) {
965 return error_info(-1, "AI service connector not initialized", "ai_service_connector");
966 }
967
968 // Validate request
969 if (request.study_instance_uid.empty()) {
970 return error_info(-1, "Study Instance UID is required", "ai_service_connector");
971 }
972 if (request.model_id.empty()) {
973 return error_info(-1, "Model ID is required", "ai_service_connector");
974 }
975
976 // Build request JSON
977 std::string body = json_util::build_request_json(request);
978
979 // Send request
980 auto response = http_client_->post("/inference", body);
981 if (response.is_err()) {
982 kcenon::pacs::integration::logger_adapter::error(
983 "Failed to submit inference request: {}",
984 response.error().message);
985 return response.error();
986 }
987
988 if (!response.value().is_success()) {
989 return error_info(
990 response.value().status_code,
991 "AI service returned error: " + response.value().body,
992 "ai_service_connector");
993 }
994
995 // Parse response to get job ID
996 auto job_id = json_util::extract_string(response.value().body, "job_id");
997 if (!job_id) {
998 return error_info(-1, "Failed to parse job ID from response", "ai_service_connector");
999 }
1000
1001 // Track the job
1002 job_tracker_->add_job(*job_id, request);
1003
1004 kcenon::pacs::integration::logger_adapter::info(
1005 "Inference request submitted: job_id={}, study={}, model={}",
1006 *job_id, request.study_instance_uid, request.model_id);
1007
1008 kcenon::pacs::integration::monitoring_adapter::increment_counter(
1009 metrics::inference_requests_total);
1010
1011 return *job_id;
1012 }

References kcenon::pacs::integration::logger_adapter::error(), kcenon::pacs::integration::monitoring_adapter::increment_counter(), kcenon::pacs::integration::logger_adapter::info(), kcenon::pacs::ai::inference_request::model_id, and kcenon::pacs::ai::inference_request::study_instance_uid.

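
A minimal valid request as a sketch, assuming connector from the lifecycle sketch above; both fields below are mandatory and the returned string is the remote job ID:

    inference_request request;
    request.study_instance_uid = "1.2.3.4.5";   // hypothetical UID; must be non-empty
    request.model_id = "example-model";         // hypothetical; must be non-empty

    auto job = connector.request_inference(request);
    if (job.is_err()) {
        // validation failure, transport error, or non-2xx service response
    } else {
        const std::string& job_id = job.value();
    }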

◆ shutdown()

void ai_service_connector::impl::shutdown ( )
inline

Definition at line 945 of file ai_service_connector.cpp.

945 {
946 std::lock_guard lock(mutex_);
947
948 if (!initialized_) {
949 return;
950 }
951
952 kcenon::pacs::integration::logger_adapter::info("AI service connector shutting down");
953
954 http_client_.reset();
955 job_tracker_.reset();
956 initialized_ = false;
957 }

References kcenon::pacs::integration::logger_adapter::info().


◆ update_credentials()

auto ai_service_connector::impl::update_credentials ( authentication_type auth_type,
const std::string & credentials ) -> Result<std::monostate>
inline

Definition at line 1211 of file ai_service_connector.cpp.

1212 {
1213 std::lock_guard lock(mutex_);
1214
1215 config_.auth_type = auth_type;
1216
1217 switch (auth_type) {
1218 case authentication_type::api_key:
1219 config_.api_key = credentials;
1220 break;
1221 case authentication_type::bearer_token:
1222 config_.bearer_token = credentials;
1223 break;
1224 case authentication_type::basic: {
1225 auto colon_pos = credentials.find(':');
1226 if (colon_pos != std::string::npos) {
1227 config_.username = credentials.substr(0, colon_pos);
1228 config_.password = credentials.substr(colon_pos + 1);
1229 } else {
1230 return error_info(-1, "Basic auth credentials must be in format 'username:password'",
1231 "ai_service_connector");
1232 }
1233 break;
1234 }
1235 case authentication_type::none:
1236 break;
1237 }
1238
1239 // Recreate HTTP client with new credentials
1240 http_client_ = std::make_unique<http_client>(config_);
1241
1242 kcenon::pacs::integration::logger_adapter::info(
1243 "AI service credentials updated: auth={}",
1244 to_string(auth_type));
1245
1246 return std::monostate{};
1247 }

References kcenon::pacs::integration::logger_adapter::info().

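
A sketch of rotating to basic authentication at runtime, assuming connector from the lifecycle sketch above; the 'username:password' format is required, and the HTTP client is recreated with the updated config:

    auto updated = connector.update_credentials(
        authentication_type::basic, "alice:s3cret");   // hypothetical credentials
    if (updated.is_err()) {
        // e.g. missing ':' separator in the basic credentials
    }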

◆ wait_for_completion()

auto ai_service_connector::impl::wait_for_completion ( const std::string & job_id,
std::chrono::milliseconds timeout,
status_callback callback ) -> Result<inference_status>
inline

Definition at line 1085 of file ai_service_connector.cpp.

1087 {
1088 if (!initialized_) {
1089 return error_info(-1, "AI service connector not initialized", "ai_service_connector");
1090 }
1091
1092 auto start_time = std::chrono::steady_clock::now();
1093 auto deadline = start_time + timeout;
1094
1095 while (std::chrono::steady_clock::now() < deadline) {
1096 auto result = check_status(job_id);
1097 if (result.is_err()) {
1098 return result;
1099 }
1100
1101 auto& status = result.value();
1102
1103 // Call callback if provided
1104 if (callback) {
1105 callback(status);
1106 }
1107
1108 // Check if job is complete
1109 if (status.status == inference_status_code::completed ||
1110 status.status == inference_status_code::failed ||
1111 status.status == inference_status_code::cancelled ||
1112 status.status == inference_status_code::timeout) {
1113 return status;
1114 }
1115
1116 // Wait before polling again
1117 std::this_thread::sleep_for(config_.polling_interval);
1118 }
1119
1120 // Timeout
1121 inference_status timeout_status;
1122 timeout_status.job_id = job_id;
1123 timeout_status.status = inference_status_code::timeout;
1124 timeout_status.message = "Timed out waiting for job completion";
1125 return timeout_status;
1126 }
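
A polling sketch with a progress callback, assuming connector and job_id from the lifecycle sketch above; the callback signature is assumed from how status_callback is invoked in the listing. The connector polls at config_.polling_interval and synthesizes a timeout status once the deadline passes:

    auto final_status = connector.wait_for_completion(
        job_id,
        std::chrono::minutes(10),
        [](const inference_status& s) {
            // invoked once per poll with the latest status
        });
    if (!final_status.is_err() &&
        final_status.value().status == inference_status_code::timeout) {
        // deadline elapsed; the remote job may still be running
    }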

Member Data Documentation

◆ config_

ai_service_config ai_service_connector::impl::config_
private

Definition at line 1252 of file ai_service_connector.cpp.

◆ http_client_

std::unique_ptr<http_client> ai_service_connector::impl::http_client_
private

Definition at line 1253 of file ai_service_connector.cpp.

◆ initialized_

std::atomic<bool> ai_service_connector::impl::initialized_ {false}
private

Definition at line 1251 of file ai_service_connector.cpp.


◆ job_tracker_

std::unique_ptr<job_tracker> ai_service_connector::impl::job_tracker_
private

Definition at line 1254 of file ai_service_connector.cpp.

◆ mutex_

std::mutex ai_service_connector::impl::mutex_
mutable private

Definition at line 1250 of file ai_service_connector.cpp.


The documentation for this class was generated from the following file:
ai_service_connector.cpp