v6.9.4
/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dal_asic_id.h"
#include "dc_types.h"
#include "dccg.h"
#include "clk_mgr_internal.h"
#include "dc_state_priv.h"
#include "link.h"

#include "dce100/dce_clk_mgr.h"
#include "dce110/dce110_clk_mgr.h"
#include "dce112/dce112_clk_mgr.h"
#include "dce120/dce120_clk_mgr.h"
#include "dce60/dce60_clk_mgr.h"
#include "dcn10/rv1_clk_mgr.h"
#include "dcn10/rv2_clk_mgr.h"
#include "dcn20/dcn20_clk_mgr.h"
#include "dcn21/rn_clk_mgr.h"
#include "dcn201/dcn201_clk_mgr.h"
#include "dcn30/dcn30_clk_mgr.h"
#include "dcn301/vg_clk_mgr.h"
#include "dcn31/dcn31_clk_mgr.h"
#include "dcn314/dcn314_clk_mgr.h"
#include "dcn315/dcn315_clk_mgr.h"
#include "dcn316/dcn316_clk_mgr.h"
#include "dcn32/dcn32_clk_mgr.h"
#include "dcn35/dcn35_clk_mgr.h"

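/*
 * Count the streams in @context that the clock manager should treat as
 * active displays.  SubVP phantom streams are skipped; virtual streams
 * are counted even with DPMS off, to cover the headless case where HPD
 * does not fire in S0i2.
 */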
int clk_mgr_helper_get_active_display_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, display_count;

	display_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_state *stream = context->streams[i];

		/* Don't count SubVP phantom pipes as part of active
		 * display count
		 */
		if (dc_state_get_stream_subvp_type(context, stream) == SUBVP_PHANTOM)
			continue;

		/*
		 * Only notify active stream or virtual stream.
		 * Need to notify virtual stream to work around
		 * headless case. HPD does not fire when system is in
		 * S0i2.
		 */
		if (!stream->dpms_off || stream->signal == SIGNAL_TYPE_VIRTUAL)
			display_count++;
	}

	return display_count;
}

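/*
 * Sum plane_count over every stream in @context, active and virtual alike.
 */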
int clk_mgr_helper_get_active_plane_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, total_plane_count;

	total_plane_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_status stream_status = context->stream_status[i];

		/*
		 * Sum up plane_count for all streams (active and virtual).
		 */
		total_plane_count += stream_status.plane_count;
	}

	return total_plane_count;
}

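/*
 * Exit the optimized power state before display work begins: cache each
 * eDP panel's PSR allow-active setting in the clock manager, then disable
 * PSR and Panel Replay so the link is fully active.
 */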
void clk_mgr_exit_optimized_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (dc->hwss.exit_optimized_pwr_state)
		dc->hwss.exit_optimized_pwr_state(dc, dc->current_state);

	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			bool allow_active = false;

			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			clk_mgr->psr_allow_active_cache = edp_link->psr_settings.psr_allow_active;
			dc->link_srv->edp_set_psr_allow_active(edp_link, &allow_active, false, false, NULL);
			dc->link_srv->edp_set_replay_allow_active(edp_link, &allow_active, false, false, NULL);
		}
	}
}

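/*
 * Counterpart of clk_mgr_exit_optimized_pwr_state(): restore the cached
 * PSR/Replay allow-active setting on each eDP panel, then let the
 * hardware sequencer re-apply its power optimizations.
 */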
void clk_mgr_optimize_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			dc->link_srv->edp_set_psr_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
			dc->link_srv->edp_set_replay_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
		}
	}

	if (dc->hwss.optimize_pwr_state)
		dc->hwss.optimize_pwr_state(dc, dc->current_state);
}

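/*
 * Create the clock manager matching the ASIC identified by ctx->asic_id.
 * Dispatches on chip_family (and, within a family, on hardware revision)
 * to the per-generation construct routine.  Returns the common struct
 * clk_mgr base, or NULL on allocation failure; release with
 * dc_destroy_clk_mgr().
 *
 * Minimal usage sketch (hypothetical caller, for illustration only):
 *
 *	struct clk_mgr *clk_mgr = dc_clk_mgr_create(ctx, pp_smu, dccg);
 *
 *	if (!clk_mgr)
 *		return false;
 *	...
 *	dc_destroy_clk_mgr(clk_mgr);
 */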
struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
{
	struct hw_asic_id asic_id = ctx->asic_id;

	switch (asic_id.chip_family) {
#if defined(CONFIG_DRM_AMD_DC_SI)
	case FAMILY_SI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce60_clk_mgr_construct(ctx, clk_mgr);
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#endif
	case FAMILY_CI:
	case FAMILY_KV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_CZ: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce110_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_VI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
			dce_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_AI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
			dce121_clk_mgr_construct(ctx, clk_mgr);
		else
			dce120_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#if defined(CONFIG_DRM_AMD_DC_FP)
	case FAMILY_RV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		if (ASICREV_IS_RENOIR(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}

		if (ASICREV_IS_GREEN_SARDINE(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev)) {
			rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN(asic_id.hw_internal_rev) ||
				ASICREV_IS_PICASSO(asic_id.hw_internal_rev)) {
			rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_NV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_DIMGREY_CAVEFISH_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_BEIGE_GOBY_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (asic_id.chip_id == DEVICE_ID_NV_13FE) {
			dcn201_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}
	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(asic_id.hw_internal_rev)) {
			struct clk_mgr_vgh *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

			if (clk_mgr == NULL) {
				BREAK_TO_DEBUGGER();
				return NULL;
			}
			vg_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base.base;
		}
		break;

	case FAMILY_YELLOW_CARP: {
		struct clk_mgr_dcn31 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn31_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_10_3_6: {
		struct clk_mgr_dcn315 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn315_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_10_3_7: {
		struct clk_mgr_dcn316 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn316_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_11_0_0: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn32_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}

	case AMDGPU_FAMILY_GC_11_0_1: {
		struct clk_mgr_dcn314 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn314_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	break;

	case AMDGPU_FAMILY_GC_11_5_0: {
		struct clk_mgr_dcn35 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn35_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	break;

#endif	/* CONFIG_DRM_AMD_DC_FP */
	default:
		ASSERT(0); /* Unknown Asic */
		break;
	}

	return NULL;
}

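/*
 * Tear down a clock manager created by dc_clk_mgr_create(): run the
 * per-generation destroy hook where one exists, then free the allocation.
 */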
void dc_destroy_clk_mgr(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

#ifdef CONFIG_DRM_AMD_DC_FP
	switch (clk_mgr_base->ctx->asic_id.chip_family) {
	case FAMILY_NV:
		if (ASICREV_IS_SIENNA_CICHLID_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		} else if (ASICREV_IS_DIMGREY_CAVEFISH_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		if (ASICREV_IS_BEIGE_GOBY_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			vg_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_YELLOW_CARP:
		dcn31_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_6:
		dcn315_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_7:
		dcn316_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_0:
		dcn32_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_1:
		dcn314_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_5_0:
		dcn35_clk_mgr_destroy(clk_mgr);
		break;

	default:
		break;
	}
#endif /* CONFIG_DRM_AMD_DC_FP */

	kfree(clk_mgr);
}

v6.13.7
/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dal_asic_id.h"
#include "dc_types.h"
#include "dccg.h"
#include "clk_mgr_internal.h"
#include "dc_state_priv.h"
#include "link.h"

#include "dce100/dce_clk_mgr.h"
#include "dce110/dce110_clk_mgr.h"
#include "dce112/dce112_clk_mgr.h"
#include "dce120/dce120_clk_mgr.h"
#include "dce60/dce60_clk_mgr.h"
#include "dcn10/rv1_clk_mgr.h"
#include "dcn10/rv2_clk_mgr.h"
#include "dcn20/dcn20_clk_mgr.h"
#include "dcn21/rn_clk_mgr.h"
#include "dcn201/dcn201_clk_mgr.h"
#include "dcn30/dcn30_clk_mgr.h"
#include "dcn301/vg_clk_mgr.h"
#include "dcn31/dcn31_clk_mgr.h"
#include "dcn314/dcn314_clk_mgr.h"
#include "dcn315/dcn315_clk_mgr.h"
#include "dcn316/dcn316_clk_mgr.h"
#include "dcn32/dcn32_clk_mgr.h"
#include "dcn35/dcn35_clk_mgr.h"
#include "dcn401/dcn401_clk_mgr.h"

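/*
 * Count the streams in @context that the clock manager should treat as
 * active displays.  SubVP phantom streams are skipped; a stream counts
 * when DPMS is on or when planes are still attached to it.
 */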
int clk_mgr_helper_get_active_display_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, display_count;

	display_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_state *stream = context->streams[i];
		const struct dc_stream_status *stream_status = &context->stream_status[i];

		/* Don't count SubVP phantom pipes as part of active
		 * display count
		 */
		if (dc_state_get_stream_subvp_type(context, stream) == SUBVP_PHANTOM)
			continue;

		if (!stream->dpms_off || (stream_status && stream_status->plane_count))
			display_count++;
	}

	return display_count;
}

int clk_mgr_helper_get_active_plane_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, total_plane_count;

	total_plane_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_status stream_status = context->stream_status[i];

		/*
		 * Sum up plane_count for all streams (active and virtual).
		 */
		total_plane_count += stream_status.plane_count;
	}

	return total_plane_count;
}

void clk_mgr_exit_optimized_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (dc->hwss.exit_optimized_pwr_state)
		dc->hwss.exit_optimized_pwr_state(dc, dc->current_state);

	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			bool allow_active = false;

			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			clk_mgr->psr_allow_active_cache = edp_link->psr_settings.psr_allow_active;
			dc->link_srv->edp_set_psr_allow_active(edp_link, &allow_active, false, false, NULL);
			dc->link_srv->edp_set_replay_allow_active(edp_link, &allow_active, false, false, NULL);
		}
	}
}

void clk_mgr_optimize_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			dc->link_srv->edp_set_psr_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
			dc->link_srv->edp_set_replay_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
		}
	}

	if (dc->hwss.optimize_pwr_state)
		dc->hwss.optimize_pwr_state(dc, dc->current_state);
}

struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
{
	struct hw_asic_id asic_id = ctx->asic_id;

	switch (asic_id.chip_family) {
#if defined(CONFIG_DRM_AMD_DC_SI)
	case FAMILY_SI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce60_clk_mgr_construct(ctx, clk_mgr);
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#endif
	case FAMILY_CI:
	case FAMILY_KV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_CZ: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce110_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_VI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
			dce_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_AI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
			dce121_clk_mgr_construct(ctx, clk_mgr);
		else
			dce120_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#if defined(CONFIG_DRM_AMD_DC_FP)
	case FAMILY_RV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		if (ASICREV_IS_RENOIR(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}

		if (ASICREV_IS_GREEN_SARDINE(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev)) {
			rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN(asic_id.hw_internal_rev) ||
				ASICREV_IS_PICASSO(asic_id.hw_internal_rev)) {
			rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_NV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_DIMGREY_CAVEFISH_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_BEIGE_GOBY_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ctx->dce_version == DCN_VERSION_2_01) {
			dcn201_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}
	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(asic_id.hw_internal_rev)) {
			struct clk_mgr_vgh *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

			if (clk_mgr == NULL) {
				BREAK_TO_DEBUGGER();
				return NULL;
			}
			vg_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base.base;
		}
		break;

	case FAMILY_YELLOW_CARP: {
		struct clk_mgr_dcn31 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn31_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_10_3_6: {
		struct clk_mgr_dcn315 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn315_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_10_3_7: {
		struct clk_mgr_dcn316 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn316_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_11_0_0: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dcn32_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}

	case AMDGPU_FAMILY_GC_11_0_1: {
		struct clk_mgr_dcn314 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn314_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	break;

	case AMDGPU_FAMILY_GC_11_5_0: {
		struct clk_mgr_dcn35 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
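		/*
		 * GC 11.5.0 covers both DCN 3.5 and DCN 3.5.1 parts; choose
		 * the construct routine by the reported DCE version.
		 */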
		if (ctx->dce_version == DCN_VERSION_3_51)
			dcn351_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		else
			dcn35_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);

		return &clk_mgr->base.base;
	}
	break;

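	/*
	 * Unlike the other families above, the DCN 4.01 constructor
	 * allocates the clk_mgr itself and returns it (or NULL on failure).
	 */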
	case AMDGPU_FAMILY_GC_12_0_0: {
		struct clk_mgr_internal *clk_mgr = dcn401_clk_mgr_construct(ctx, dccg);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		return &clk_mgr->base;
	}
	break;
#endif	/* CONFIG_DRM_AMD_DC_FP */
	default:
		ASSERT(0); /* Unknown Asic */
		break;
	}

	return NULL;
}

void dc_destroy_clk_mgr(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

#ifdef CONFIG_DRM_AMD_DC_FP
	switch (clk_mgr_base->ctx->asic_id.chip_family) {
	case FAMILY_NV:
		if (ASICREV_IS_SIENNA_CICHLID_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		} else if (ASICREV_IS_DIMGREY_CAVEFISH_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		if (ASICREV_IS_BEIGE_GOBY_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			vg_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_YELLOW_CARP:
		dcn31_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_6:
		dcn315_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_7:
		dcn316_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_0:
		dcn32_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_1:
		dcn314_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_5_0:
		dcn35_clk_mgr_destroy(clk_mgr);
		break;
	case AMDGPU_FAMILY_GC_12_0_0:
		dcn401_clk_mgr_destroy(clk_mgr);
		break;

	default:
		break;
	}
#endif /* CONFIG_DRM_AMD_DC_FP */

	kfree(clk_mgr);
}