File: | dev/pci/drm/amd/amdgpu/amdgpu_atombios_encoders.c |
Warning: | line 210, column 2 Value stored to 'backlight_level' is never read |
Press '?' to see keyboard shortcuts
Keyboard shortcuts:
1 | /* |
2 | * Copyright 2007-11 Advanced Micro Devices, Inc. |
3 | * Copyright 2008 Red Hat Inc. |
4 | * |
5 | * Permission is hereby granted, free of charge, to any person obtaining a |
6 | * copy of this software and associated documentation files (the "Software"), |
7 | * to deal in the Software without restriction, including without limitation |
8 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
9 | * and/or sell copies of the Software, and to permit persons to whom the |
10 | * Software is furnished to do so, subject to the following conditions: |
11 | * |
12 | * The above copyright notice and this permission notice shall be included in |
13 | * all copies or substantial portions of the Software. |
14 | * |
15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
18 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR |
19 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, |
20 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR |
21 | * OTHER DEALINGS IN THE SOFTWARE. |
22 | * |
23 | * Authors: Dave Airlie |
24 | * Alex Deucher |
25 | */ |
26 | |
27 | #include <linux/pci.h> |
28 | |
29 | #include <drm/drm_crtc_helper.h> |
30 | #include <drm/amdgpu_drm.h> |
31 | #include "amdgpu.h" |
32 | #include "amdgpu_connectors.h" |
33 | #include "amdgpu_display.h" |
34 | #include "atom.h" |
35 | #include "atombios_encoders.h" |
36 | #include "atombios_dp.h" |
37 | #include <linux/backlight.h> |
38 | #include "bif/bif_4_1_d.h" |
39 | |
40 | u8 |
41 | amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev) |
42 | { |
43 | u8 backlight_level; |
44 | u32 bios_2_scratch; |
45 | |
46 | bios_2_scratch = RREG32(mmBIOS_SCRATCH_2)amdgpu_device_rreg(adev, (0x5cb), 0); |
47 | |
48 | backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK0x0000FF00L) >> |
49 | ATOM_S2_CURRENT_BL_LEVEL_SHIFT8); |
50 | |
51 | return backlight_level; |
52 | } |
53 | |
54 | void |
55 | amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev, |
56 | u8 backlight_level) |
57 | { |
58 | u32 bios_2_scratch; |
59 | |
60 | bios_2_scratch = RREG32(mmBIOS_SCRATCH_2)amdgpu_device_rreg(adev, (0x5cb), 0); |
61 | |
62 | bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK0x0000FF00L; |
63 | bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT8) & |
64 | ATOM_S2_CURRENT_BL_LEVEL_MASK0x0000FF00L); |
65 | |
66 | WREG32(mmBIOS_SCRATCH_2, bios_2_scratch)amdgpu_device_wreg(adev, (0x5cb), (bios_2_scratch), 0); |
67 | } |
68 | |
69 | u8 |
70 | amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder) |
71 | { |
72 | struct drm_device *dev = amdgpu_encoder->base.dev; |
73 | struct amdgpu_device *adev = drm_to_adev(dev); |
74 | |
75 | if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU0x0020)) |
76 | return 0; |
77 | |
78 | return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); |
79 | } |
80 | |
81 | void |
82 | amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder, |
83 | u8 level) |
84 | { |
85 | struct drm_encoder *encoder = &amdgpu_encoder->base; |
86 | struct drm_device *dev = amdgpu_encoder->base.dev; |
87 | struct amdgpu_device *adev = drm_to_adev(dev); |
88 | struct amdgpu_encoder_atom_dig *dig; |
89 | |
90 | if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU0x0020)) |
91 | return; |
92 | |
93 | if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT((0x1L << 0x00000001 ) | (0x1L << 0x00000005 )))) && |
94 | amdgpu_encoder->enc_priv) { |
95 | dig = amdgpu_encoder->enc_priv; |
96 | dig->backlight_level = level; |
97 | amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level); |
98 | |
99 | switch (amdgpu_encoder->encoder_id) { |
100 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E: |
101 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA0x1F: |
102 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY10x20: |
103 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY20x21: |
104 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY30x25: |
105 | if (dig->backlight_level == 0) |
106 | amdgpu_atombios_encoder_setup_dig_transmitter(encoder, |
107 | ATOM_TRANSMITTER_ACTION_LCD_BLOFF2, 0, 0); |
108 | else { |
109 | amdgpu_atombios_encoder_setup_dig_transmitter(encoder, |
110 | ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL4, 0, 0); |
111 | amdgpu_atombios_encoder_setup_dig_transmitter(encoder, |
112 | ATOM_TRANSMITTER_ACTION_LCD_BLON3, 0, 0); |
113 | } |
114 | break; |
115 | default: |
116 | break; |
117 | } |
118 | } |
119 | } |
120 | |
121 | #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE1) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE) |
122 | |
123 | static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd) |
124 | { |
125 | u8 level; |
126 | |
127 | /* Convert brightness to hardware level */ |
128 | if (bd->props.brightness < 0) |
129 | level = 0; |
130 | else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL0xFF) |
131 | level = AMDGPU_MAX_BL_LEVEL0xFF; |
132 | else |
133 | level = bd->props.brightness; |
134 | |
135 | return level; |
136 | } |
137 | |
138 | static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd) |
139 | { |
140 | struct amdgpu_backlight_privdata *pdata = bl_get_data(bd)(bd)->data; |
141 | struct amdgpu_encoder *amdgpu_encoder = pdata->encoder; |
142 | |
143 | amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, |
144 | amdgpu_atombios_encoder_backlight_level(bd)); |
145 | |
146 | return 0; |
147 | } |
148 | |
149 | static int |
150 | amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd) |
151 | { |
152 | struct amdgpu_backlight_privdata *pdata = bl_get_data(bd)(bd)->data; |
153 | struct amdgpu_encoder *amdgpu_encoder = pdata->encoder; |
154 | struct drm_device *dev = amdgpu_encoder->base.dev; |
155 | struct amdgpu_device *adev = drm_to_adev(dev); |
156 | |
157 | return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); |
158 | } |
159 | |
160 | static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = { |
161 | .get_brightness = amdgpu_atombios_encoder_get_backlight_brightness, |
162 | .update_status = amdgpu_atombios_encoder_update_backlight_status, |
163 | }; |
164 | |
165 | void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder, |
166 | struct drm_connector *drm_connector) |
167 | { |
168 | struct drm_device *dev = amdgpu_encoder->base.dev; |
169 | struct amdgpu_device *adev = drm_to_adev(dev); |
170 | struct backlight_device *bd; |
171 | struct backlight_properties props; |
172 | struct amdgpu_backlight_privdata *pdata; |
173 | struct amdgpu_encoder_atom_dig *dig; |
174 | u8 backlight_level; |
175 | char bl_name[16]; |
176 | |
177 | /* Mac laptops with multiple GPUs use the gmux driver for backlight |
178 | * so don't register a backlight device |
179 | */ |
180 | if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE0x106b) && |
181 | (adev->pdev->device == 0x6741)) |
182 | return; |
183 | |
184 | if (!amdgpu_encoder->enc_priv) |
185 | return; |
186 | |
187 | if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU0x0020)) |
188 | return; |
189 | |
190 | pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL(0x0001 | 0x0004)); |
191 | if (!pdata) { |
192 | DRM_ERROR("Memory allocation failed\n")__drm_err("Memory allocation failed\n"); |
193 | goto error; |
194 | } |
195 | |
196 | memset(&props, 0, sizeof(props))__builtin_memset((&props), (0), (sizeof(props))); |
197 | props.max_brightness = AMDGPU_MAX_BL_LEVEL0xFF; |
198 | props.type = BACKLIGHT_RAW0; |
199 | snprintf(bl_name, sizeof(bl_name), |
200 | "amdgpu_bl%d", dev->primary->index); |
201 | bd = backlight_device_register(bl_name, drm_connector->kdev, |
202 | pdata, &amdgpu_atombios_encoder_backlight_ops, &props); |
203 | if (IS_ERR(bd)) { |
204 | DRM_ERROR("Backlight registration failed\n")__drm_err("Backlight registration failed\n"); |
205 | goto error; |
206 | } |
207 | |
208 | pdata->encoder = amdgpu_encoder; |
209 | |
210 | backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); |
Value stored to 'backlight_level' is never read | |
211 | |
212 | dig = amdgpu_encoder->enc_priv; |
213 | dig->bl_dev = bd; |
214 | |
215 | bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd); |
216 | bd->props.power = FB_BLANK_UNBLANK0; |
217 | backlight_update_status(bd); |
218 | |
219 | DRM_INFO("amdgpu atom DIG backlight initialized\n")printk("\0016" "[" "drm" "] " "amdgpu atom DIG backlight initialized\n" ); |
220 | |
221 | return; |
222 | |
223 | error: |
224 | kfree(pdata); |
225 | return; |
226 | } |
227 | |
228 | void |
229 | amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder) |
230 | { |
231 | struct drm_device *dev = amdgpu_encoder->base.dev; |
232 | struct amdgpu_device *adev = drm_to_adev(dev); |
233 | struct backlight_device *bd = NULL((void *)0); |
234 | struct amdgpu_encoder_atom_dig *dig; |
235 | |
236 | if (!amdgpu_encoder->enc_priv) |
237 | return; |
238 | |
239 | if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU0x0020)) |
240 | return; |
241 | |
242 | dig = amdgpu_encoder->enc_priv; |
243 | bd = dig->bl_dev; |
244 | dig->bl_dev = NULL((void *)0); |
245 | |
246 | if (bd) { |
247 | struct amdgpu_legacy_backlight_privdata *pdata; |
248 | |
249 | pdata = bl_get_data(bd)(bd)->data; |
250 | backlight_device_unregister(bd); |
251 | kfree(pdata); |
252 | |
253 | DRM_INFO("amdgpu atom LVDS backlight unloaded\n")printk("\0016" "[" "drm" "] " "amdgpu atom LVDS backlight unloaded\n" ); |
254 | } |
255 | } |
256 | |
257 | #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */ |
258 | |
/* No-op stub used when backlight class support is compiled out.
 * Fix: the stub took a single parameter while the real implementation and
 * its callers use two (encoder + connector); the mismatched signature would
 * fail to build against the shared prototype when this branch is compiled. */
void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder,
					    struct drm_connector *drm_connector)
{
}
262 | |
/* No-op stub used when backlight class support is compiled out. */
void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
{
}
266 | |
267 | #endif |
268 | |
269 | bool_Bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder) |
270 | { |
271 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );}); |
272 | switch (amdgpu_encoder->encoder_id) { |
273 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO10x14: |
274 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E: |
275 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY10x20: |
276 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY20x21: |
277 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY30x25: |
278 | return true1; |
279 | default: |
280 | return false0; |
281 | } |
282 | } |
283 | |
284 | bool_Bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder, |
285 | const struct drm_display_mode *mode, |
286 | struct drm_display_mode *adjusted_mode) |
287 | { |
288 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );}); |
289 | |
290 | /* set the active encoder to connector routing */ |
291 | amdgpu_encoder_set_active_device(encoder); |
292 | drm_mode_set_crtcinfo(adjusted_mode, 0); |
293 | |
294 | /* hw bug */ |
295 | if ((mode->flags & DRM_MODE_FLAG_INTERLACE(1<<4)) |
296 | && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2))) |
297 | adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2; |
298 | |
299 | /* vertical FP must be at least 1 */ |
300 | if (mode->crtc_vsync_start == mode->crtc_vdisplay) |
301 | adjusted_mode->crtc_vsync_start++; |
302 | |
303 | /* get the native mode for scaling */ |
304 | if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT((0x1L << 0x00000001 ) | (0x1L << 0x00000005 )))) |
305 | amdgpu_panel_mode_fixup(encoder, adjusted_mode); |
306 | else if (amdgpu_encoder->rmx_type != RMX_OFF) |
307 | amdgpu_panel_mode_fixup(encoder, adjusted_mode); |
308 | |
309 | if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT((0x1L << 0x00000003 ) | (0x1L << 0x00000007 ) | ( 0x1L << 0x00000009 ) | (0x1L << 0x0000000A ) | (0x1L << 0x0000000B ) | (0x1L << 0x00000006 )) | ATOM_DEVICE_LCD_SUPPORT((0x1L << 0x00000001 ) | (0x1L << 0x00000005 )))) || |
310 | (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE0x00)) { |
311 | struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); |
312 | amdgpu_atombios_dp_set_link_config(connector, adjusted_mode); |
313 | } |
314 | |
315 | return true1; |
316 | } |
317 | |
318 | static void |
319 | amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action) |
320 | { |
321 | struct drm_device *dev = encoder->dev; |
322 | struct amdgpu_device *adev = drm_to_adev(dev); |
323 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );}); |
324 | DAC_ENCODER_CONTROL_PS_ALLOCATIONDAC_ENCODER_CONTROL_PARAMETERS args; |
325 | int index = 0; |
326 | |
327 | memset(&args, 0, sizeof(args))__builtin_memset((&args), (0), (sizeof(args))); |
328 | |
329 | switch (amdgpu_encoder->encoder_id) { |
330 | case ENCODER_OBJECT_ID_INTERNAL_DAC10x04: |
331 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC10x15: |
332 | index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl)(((char*)(&((ATOM_MASTER_LIST_OF_COMMAND_TABLES*)0)->DAC1EncoderControl )-(char*)0)/sizeof(USHORT)); |
333 | break; |
334 | case ENCODER_OBJECT_ID_INTERNAL_DAC20x05: |
335 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC20x16: |
336 | index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl)(((char*)(&((ATOM_MASTER_LIST_OF_COMMAND_TABLES*)0)->DAC2EncoderControl )-(char*)0)/sizeof(USHORT)); |
337 | break; |
338 | } |
339 | |
340 | args.ucAction = action; |
341 | args.ucDacStandard = ATOM_DAC1_PS21; |
342 | args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10)((__uint16_t)(amdgpu_encoder->pixel_clock / 10)); |
343 | |
344 | amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); |
345 | |
346 | } |
347 | |
348 | static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder) |
349 | { |
350 | int bpc = 8; |
351 | |
352 | if (encoder->crtc) { |
353 | struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc)({ const __typeof( ((struct amdgpu_crtc *)0)->base ) *__mptr = (encoder->crtc); (struct amdgpu_crtc *)( (char *)__mptr - __builtin_offsetof(struct amdgpu_crtc, base) );}); |
354 | bpc = amdgpu_crtc->bpc; |
355 | } |
356 | |
357 | switch (bpc) { |
358 | case 0: |
359 | return PANEL_BPC_UNDEFINE0x00; |
360 | case 6: |
361 | return PANEL_6BIT_PER_COLOR0x01; |
362 | case 8: |
363 | default: |
364 | return PANEL_8BIT_PER_COLOR0x02; |
365 | case 10: |
366 | return PANEL_10BIT_PER_COLOR0x03; |
367 | case 12: |
368 | return PANEL_12BIT_PER_COLOR0x04; |
369 | case 16: |
370 | return PANEL_16BIT_PER_COLOR0x05; |
371 | } |
372 | } |
373 | |
/* Per-revision parameter layouts for the DVOEncoderControl atom table. */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;	/* frev 1, crev 1 */
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;			/* frev 1, crev 2 */
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;		/* frev 1, crev 3 */
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;		/* frev 1, crev 4 */
};
380 | |
/*
 * Program the DVO encoder via the DVOEncoderControl atom table.  The
 * parameter layout depends on the table revision the BIOS reports, so
 * the header is parsed first and the matching union member filled in.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
443 | |
/*
 * Map the connector attached to @encoder to an ATOM encoder mode
 * (DP/DVO/HDMI/DVI/LVDS/CRT/TV).  For digital connectors the module
 * parameter 'amdgpu_audio' plus the connector's audio setting and EDID
 * HDMI detection decide between HDMI and DVI.
 */
int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
	 */
	if (!connector)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	amdgpu_connector = to_amdgpu_connector(connector);

	switch (connector->connector_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->use_digital &&
			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else if (amdgpu_connector->use_digital)
				return ATOM_ENCODER_MODE_DVI;
			else
				return ATOM_ENCODER_MODE_CRT;
		} else if (amdgpu_connector->use_digital) {
			return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_CRT;
		}
		break;
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_HDMIA:
	default:
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_LVDS:
		return ATOM_ENCODER_MODE_LVDS;
		break;
	case DRM_MODE_CONNECTOR_DisplayPort:
		dig_connector = amdgpu_connector->con_priv;
		/* Real DP/eDP sinks get DP mode; passive DP->DVI/HDMI
		 * adapters fall through to the HDMI/DVI logic. */
		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
			return ATOM_ENCODER_MODE_DP;
		} else if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
		break;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
		/*return ATOM_ENCODER_MODE_CV;*/
		break;
	}
}
538 | |
539 | /* |
540 | * DIG Encoder/Transmitter Setup |
541 | * |
542 | * DCE 6.0 |
543 | * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B). |
544 | * Supports up to 6 digital outputs |
545 | * - 6 DIG encoder blocks. |
546 | * - DIG to PHY mapping is hardcoded |
547 | * DIG1 drives UNIPHY0 link A, A+B |
548 | * DIG2 drives UNIPHY0 link B |
549 | * DIG3 drives UNIPHY1 link A, A+B |
550 | * DIG4 drives UNIPHY1 link B |
551 | * DIG5 drives UNIPHY2 link A, A+B |
552 | * DIG6 drives UNIPHY2 link B |
553 | * |
554 | * Routing |
555 | * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links) |
556 | * Examples: |
557 | * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI |
558 | * crtc1 -> dig1 -> UNIPHY0 link B -> DP |
559 | * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS |
560 | * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI |
561 | */ |
562 | |
/* Per-revision parameter layouts for the DIGxEncoderControl atom table. */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
};
570 | |
/*
 * Program a DIG encoder block via the DIGxEncoderControl atom table.
 * @action selects the operation (stream setup, panel mode, DP link
 * training phases, video on/off); @panel_mode is only consumed with
 * ATOM_ENCODER_CMD_SETUP_PANEL_MODE.  The parameter layout depends on
 * the table revision reported by the BIOS.
 */
void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
					  int action, int panel_mode)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	union dig_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
	uint8_t frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int hpd_id = AMDGPU_HPD_NONE;

	/* DP parameters come from the connector, when one is attached. */
	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		hpd_id = amdgpu_connector->hpd.hpd;
	}

	/* no dig encoder assigned */
	if (dig->dig_encoder == -1)
		return;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				/* NOTE(review): writes through the v3 layout in the
				 * crev-1 branch; presumably relies on union aliasing —
				 * confirm against the atombios table definitions. */
				args.v3.ucPanelMode = panel_mode;
			else
				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
				args.v1.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.ucLaneNum = 8;
			else
				args.v1.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
			/* NOTE(review): plain '=' below overwrites the link-rate bit
			 * set just above; matches the historical driver code — verify
			 * before changing. */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
				break;
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
			break;
		case 2:
		case 3:
			args.v3.ucAction = action;
			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
				/* v1.ucConfig aliases the v3 config byte in the union. */
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
			args.v3.acConfig.ucDigSel = dig->dig_encoder;
			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		case 4:
			args.v4.ucAction = action;
			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v4.ucPanelMode = panel_mode;
			else
				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
				/* v1.ucConfig aliases the v4 config byte in the union. */
				if (dp_clock == 540000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
				else if (dp_clock == 324000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
				else if (dp_clock == 270000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
				else
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
			}
			args.v4.acConfig.ucDigSel = dig->dig_encoder;
			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			/* HPD ids are 1-based in this table; 0 means none. */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v4.ucHPD_ID = 0;
			else
				args.v4.ucHPD_ID = hpd_id + 1;
			break;
		case 5:
			switch (action) {
			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
				args.v5.asDPPanelModeParam.ucAction = action;
				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
				break;
			case ATOM_ENCODER_CMD_STREAM_SETUP:
				args.v5.asStreamParam.ucAction = action;
				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
				args.v5.asStreamParam.ucDigMode =
					amdgpu_atombios_encoder_get_encoder_mode(encoder);
				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
				else if (amdgpu_dig_monitor_is_duallink(encoder,
									amdgpu_encoder->pixel_clock))
					args.v5.asStreamParam.ucLaneNum = 8;
				else
					args.v5.asStreamParam.ucLaneNum = 4;
				args.v5.asStreamParam.ulPixelClock =
					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
				args.v5.asStreamParam.ucBitPerColor =
					amdgpu_atombios_encoder_get_bpc(encoder);
				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
				break;
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
				args.v5.asCmdParam.ucAction = action;
				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
				break;
			default:
				DRM_ERROR("Unsupported action 0x%x\n", action);
				break;
			}
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}
750 | |
/*
 * Overlay of every AtomBIOS DIG transmitter control parameter layout
 * (command-table format revisions v1 through v1.6).  The caller zeroes the
 * union and fills exactly one member, chosen by the frev/crev pair reported
 * by amdgpu_atom_parse_cmd_header() for the transmitter control table.
 */
751 | union dig_transmitter_control {
752 | DIG_TRANSMITTER_CONTROL_PS_ALLOCATIONDIG_TRANSMITTER_CONTROL_PARAMETERS v1;
753 | DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
754 | DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
755 | DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
756 | DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
757 | DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
758 | };
759 | |
/*
 * amdgpu_atombios_encoder_setup_dig_transmitter - drive the AtomBIOS
 * UNIPHY/LVTMA/DVO transmitter control command table for a DIG encoder.
 *
 * @encoder:  DRM encoder whose PHY/transmitter is being programmed
 * @action:   ATOM_TRANSMITTER_ACTION_* opcode (init, enable, disable,
 *            LCD blanking, voltage-swing/pre-emphasis setup, ...)
 * @lane_num: lane selector, used only for the SETUP_VSEMPH action
 * @lane_set: swing/pre-emphasis setting, used only for SETUP_VSEMPH
 *
 * Gathers DP clock/lane-count/HPD data from the connector, picks the
 * command-table index from the encoder object id, then marshals the
 * arguments into the layout matching the table's frev/crev before
 * executing it.  Returns early (silently) if no DIG encoder has been
 * assigned or the table header cannot be parsed.
 */
760 | void
761 | amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
762 | uint8_t lane_num, uint8_t lane_set)
763 | {
764 | struct drm_device *dev = encoder->dev;
765 | struct amdgpu_device *adev = drm_to_adev(dev);
766 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );});
767 | struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
768 | struct drm_connector *connector;
769 | union dig_transmitter_control args;
770 | int index = 0;
771 | uint8_t frev, crev;
772 | bool_Bool is_dp = false0;
773 | int pll_id = 0;
774 | int dp_clock = 0;
775 | int dp_lane_count = 0;
776 | int connector_object_id = 0;
/* NOTE(review): igp_lane_info stays 0 for the whole function — the IGP
 * lane-select branches in the crev==1 case below can never fire as
 * written; confirm whether it was meant to be read from the ATOM
 * integrated-info table. */
777 | int igp_lane_info = 0;
778 | int dig_encoder = dig->dig_encoder;
779 | int hpd_id = AMDGPU_HPD_NONE;
780 |
781 | if (action == ATOM_TRANSMITTER_ACTION_INIT7) {
782 | connector = amdgpu_get_connector_for_encoder_init(encoder);
783 | /* just needed to avoid bailing in the encoder check. the encoder
784 | * isn't used for init
785 | */
786 | dig_encoder = 0;
787 | } else
788 | connector = amdgpu_get_connector_for_encoder(encoder);
789 |
/* Pull per-connector DP parameters when a connector is attached. */
790 | if (connector) {
791 | struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector)({ const __typeof( ((struct amdgpu_connector *)0)->base ) * __mptr = (connector); (struct amdgpu_connector *)( (char *)__mptr - __builtin_offsetof(struct amdgpu_connector, base) );});
792 | struct amdgpu_connector_atom_dig *dig_connector =
793 | amdgpu_connector->con_priv;
794 |
795 | hpd_id = amdgpu_connector->hpd.hpd;
796 | dp_clock = dig_connector->dp_clock;
797 | dp_lane_count = dig_connector->dp_lane_count;
798 | connector_object_id =
799 | (amdgpu_connector->connector_object_id & OBJECT_ID_MASK0x00FF) >> OBJECT_ID_SHIFT0x00;
800 | }
801 |
/* PLL feeding the PHY comes from the CRTC this encoder is bound to. */
802 | if (encoder->crtc) {
803 | struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc)({ const __typeof( ((struct amdgpu_crtc *)0)->base ) *__mptr = (encoder->crtc); (struct amdgpu_crtc *)( (char *)__mptr - __builtin_offsetof(struct amdgpu_crtc, base) );});
804 | pll_id = amdgpu_crtc->pll_id;
805 | }
806 |
807 | /* no dig encoder assigned */
808 | if (dig_encoder == -1)
809 | return;
810 |
811 | if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder))(((amdgpu_atombios_encoder_get_encoder_mode(encoder)) == 0) || ((amdgpu_atombios_encoder_get_encoder_mode(encoder)) == 5)))
812 | is_dp = true1;
813 |
814 | memset(&args, 0, sizeof(args))__builtin_memset((&args), (0), (sizeof(args)));
815 |
/* Select the command table from the encoder object id. */
816 | switch (amdgpu_encoder->encoder_id) {
817 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO10x14:
818 | index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl)(((char*)(&((ATOM_MASTER_LIST_OF_COMMAND_TABLES*)0)->DVOOutputControl )-(char*)0)/sizeof(USHORT));
819 | break;
820 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E:
821 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY10x20:
822 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY20x21:
823 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY30x25:
824 | index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl)(((char*)(&((ATOM_MASTER_LIST_OF_COMMAND_TABLES*)0)->DIG1TransmitterControl )-(char*)0)/sizeof(USHORT));
825 | break;
826 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA0x1F:
827 | index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl)(((char*)(&((ATOM_MASTER_LIST_OF_COMMAND_TABLES*)0)->DIG2TransmitterControl )-(char*)0)/sizeof(USHORT));
828 | break;
829 | }
830 |
831 | if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
832 | return;
833 |
/* Marshal args in the layout matching the table's format revision. */
834 | switch (frev) {
835 | case 1:
836 | switch (crev) {
837 | case 1:
838 | args.v1.ucAction = action;
839 | if (action == ATOM_TRANSMITTER_ACTION_INIT7) {
840 | args.v1.usInitInfo = cpu_to_le16(connector_object_id)((__uint16_t)(connector_object_id));
841 | } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH11) {
842 | args.v1.asMode.ucLaneSel = lane_num;
843 | args.v1.asMode.ucLaneSet = lane_set;
844 | } else {
845 | if (is_dp)
846 | args.v1.usPixelClock = cpu_to_le16(dp_clock / 10)((__uint16_t)(dp_clock / 10));
847 | else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
848 | args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10)((__uint16_t)((amdgpu_encoder->pixel_clock / 2) / 10));
849 | else
850 | args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10)((__uint16_t)(amdgpu_encoder->pixel_clock / 10));
851 | }
852 |
853 | args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL0x00;
854 |
855 | if (dig_encoder)
856 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER0x08;
857 | else
858 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER0x00;
859 |
/* IGP (APU) lane routing; dead as long as igp_lane_info is always 0 —
 * see the NOTE(review) at the declaration above. */
860 | if ((adev->flags & AMD_IS_APU) &&
861 | (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E)) {
862 | if (is_dp ||
863 | !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
864 | if (igp_lane_info & 0x1)
865 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_30x00;
866 | else if (igp_lane_info & 0x2)
867 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_70x40;
868 | else if (igp_lane_info & 0x4)
869 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_110x80;
870 | else if (igp_lane_info & 0x8)
871 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_150xc0;
872 | } else {
873 | if (igp_lane_info & 0x3)
874 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_70x00;
875 | else if (igp_lane_info & 0xc)
876 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_150x80;
877 | }
878 | }
879 |
880 | if (dig->linkb)
881 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB0x04;
882 | else
883 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA0x00;
884 |
885 | if (is_dp)
886 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT0x02;
887 | else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT((0x1L << 0x00000003 ) | (0x1L << 0x00000007 ) | ( 0x1L << 0x00000009 ) | (0x1L << 0x0000000A ) | (0x1L << 0x0000000B ) | (0x1L << 0x00000006 )))) {
888 | if (dig->coherent_mode)
889 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT0x02;
890 | if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
891 | args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK0x01;
892 | }
893 | break;
894 | case 2:
895 | args.v2.ucAction = action;
896 | if (action == ATOM_TRANSMITTER_ACTION_INIT7) {
897 | args.v2.usInitInfo = cpu_to_le16(connector_object_id)((__uint16_t)(connector_object_id));
898 | } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH11) {
899 | args.v2.asMode.ucLaneSel = lane_num;
900 | args.v2.asMode.ucLaneSet = lane_set;
901 | } else {
902 | if (is_dp)
903 | args.v2.usPixelClock = cpu_to_le16(dp_clock / 10)((__uint16_t)(dp_clock / 10));
904 | else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
905 | args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10)((__uint16_t)((amdgpu_encoder->pixel_clock / 2) / 10));
906 | else
907 | args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10)((__uint16_t)(amdgpu_encoder->pixel_clock / 10));
908 | }
909 |
910 | args.v2.acConfig.ucEncoderSel = dig_encoder;
911 | if (dig->linkb)
912 | args.v2.acConfig.ucLinkSel = 1;
913 |
/* Map the encoder object id onto the transmitter selector field. */
914 | switch (amdgpu_encoder->encoder_id) {
915 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E:
916 | args.v2.acConfig.ucTransmitterSel = 0;
917 | break;
918 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY10x20:
919 | args.v2.acConfig.ucTransmitterSel = 1;
920 | break;
921 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY20x21:
922 | args.v2.acConfig.ucTransmitterSel = 2;
923 | break;
924 | }
925 |
926 | if (is_dp) {
927 | args.v2.acConfig.fCoherentMode = 1;
928 | args.v2.acConfig.fDPConnector = 1;
929 | } else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT((0x1L << 0x00000003 ) | (0x1L << 0x00000007 ) | ( 0x1L << 0x00000009 ) | (0x1L << 0x0000000A ) | (0x1L << 0x0000000B ) | (0x1L << 0x00000006 )))) {
930 | if (dig->coherent_mode)
931 | args.v2.acConfig.fCoherentMode = 1;
932 | if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
933 | args.v2.acConfig.fDualLinkConnector = 1;
934 | }
935 | break;
936 | case 3:
937 | args.v3.ucAction = action;
938 | if (action == ATOM_TRANSMITTER_ACTION_INIT7) {
939 | args.v3.usInitInfo = cpu_to_le16(connector_object_id)((__uint16_t)(connector_object_id));
940 | } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH11) {
941 | args.v3.asMode.ucLaneSel = lane_num;
942 | args.v3.asMode.ucLaneSet = lane_set;
943 | } else {
944 | if (is_dp)
945 | args.v3.usPixelClock = cpu_to_le16(dp_clock / 10)((__uint16_t)(dp_clock / 10));
946 | else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
947 | args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10)((__uint16_t)((amdgpu_encoder->pixel_clock / 2) / 10));
948 | else
949 | args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10)((__uint16_t)(amdgpu_encoder->pixel_clock / 10));
950 | }
951 |
952 | if (is_dp)
953 | args.v3.ucLaneNum = dp_lane_count;
954 | else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
955 | args.v3.ucLaneNum = 8;
956 | else
957 | args.v3.ucLaneNum = 4;
958 |
959 | if (dig->linkb)
960 | args.v3.acConfig.ucLinkSel = 1;
961 | if (dig_encoder & 1)
962 | args.v3.acConfig.ucEncoderSel = 1;
963 |
964 | /* Select the PLL for the PHY
965 | * DP PHY should be clocked from external src if there is
966 | * one.
967 | */
968 | /* On DCE4, if there is an external clock, it generates the DP ref clock */
969 | if (is_dp && adev->clock.dp_extclk)
970 | args.v3.acConfig.ucRefClkSource = 2; /* external src */
971 | else
972 | args.v3.acConfig.ucRefClkSource = pll_id;
973 |
974 | switch (amdgpu_encoder->encoder_id) {
975 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E:
976 | args.v3.acConfig.ucTransmitterSel = 0;
977 | break;
978 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY10x20:
979 | args.v3.acConfig.ucTransmitterSel = 1;
980 | break;
981 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY20x21:
982 | args.v3.acConfig.ucTransmitterSel = 2;
983 | break;
984 | }
985 |
986 | if (is_dp)
987 | args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
988 | else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT((0x1L << 0x00000003 ) | (0x1L << 0x00000007 ) | ( 0x1L << 0x00000009 ) | (0x1L << 0x0000000A ) | (0x1L << 0x0000000B ) | (0x1L << 0x00000006 )))) {
989 | if (dig->coherent_mode)
990 | args.v3.acConfig.fCoherentMode = 1;
991 | if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
992 | args.v3.acConfig.fDualLinkConnector = 1;
993 | }
994 | break;
995 | case 4:
996 | args.v4.ucAction = action;
997 | if (action == ATOM_TRANSMITTER_ACTION_INIT7) {
998 | args.v4.usInitInfo = cpu_to_le16(connector_object_id)((__uint16_t)(connector_object_id));
999 | } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH11) {
1000 | args.v4.asMode.ucLaneSel = lane_num;
1001 | args.v4.asMode.ucLaneSet = lane_set;
1002 | } else {
1003 | if (is_dp)
1004 | args.v4.usPixelClock = cpu_to_le16(dp_clock / 10)((__uint16_t)(dp_clock / 10));
1005 | else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1006 | args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10)((__uint16_t)((amdgpu_encoder->pixel_clock / 2) / 10));
1007 | else
1008 | args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10)((__uint16_t)(amdgpu_encoder->pixel_clock / 10));
1009 | }
1010 |
1011 | if (is_dp)
1012 | args.v4.ucLaneNum = dp_lane_count;
1013 | else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1014 | args.v4.ucLaneNum = 8;
1015 | else
1016 | args.v4.ucLaneNum = 4;
1017 |
1018 | if (dig->linkb)
1019 | args.v4.acConfig.ucLinkSel = 1;
1020 | if (dig_encoder & 1)
1021 | args.v4.acConfig.ucEncoderSel = 1;
1022 |
1023 | /* Select the PLL for the PHY
1024 | * DP PHY should be clocked from external src if there is
1025 | * one.
1026 | */
1027 | /* On DCE5 DCPLL usually generates the DP ref clock */
1028 | if (is_dp) {
1029 | if (adev->clock.dp_extclk)
1030 | args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK3;
1031 | else
1032 | args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL2;
1033 | } else
1034 | args.v4.acConfig.ucRefClkSource = pll_id;
1035 |
1036 | switch (amdgpu_encoder->encoder_id) {
1037 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E:
1038 | args.v4.acConfig.ucTransmitterSel = 0;
1039 | break;
1040 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY10x20:
1041 | args.v4.acConfig.ucTransmitterSel = 1;
1042 | break;
1043 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY20x21:
1044 | args.v4.acConfig.ucTransmitterSel = 2;
1045 | break;
1046 | }
1047 |
1048 | if (is_dp)
1049 | args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1050 | else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT((0x1L << 0x00000003 ) | (0x1L << 0x00000007 ) | ( 0x1L << 0x00000009 ) | (0x1L << 0x0000000A ) | (0x1L << 0x0000000B ) | (0x1L << 0x00000006 )))) {
1051 | if (dig->coherent_mode)
1052 | args.v4.acConfig.fCoherentMode = 1;
1053 | if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1054 | args.v4.acConfig.fDualLinkConnector = 1;
1055 | }
1056 | break;
/* v5: symbol-clock based programming; PHY id is derived from both the
 * encoder object id and the linkb flag. */
1057 | case 5:
1058 | args.v5.ucAction = action;
1059 | if (is_dp)
1060 | args.v5.usSymClock = cpu_to_le16(dp_clock / 10)((__uint16_t)(dp_clock / 10));
1061 | else
1062 | args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10)((__uint16_t)(amdgpu_encoder->pixel_clock / 10));
1063 |
1064 | switch (amdgpu_encoder->encoder_id) {
1065 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E:
1066 | if (dig->linkb)
1067 | args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB1;
1068 | else
1069 | args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA0;
1070 | break;
1071 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY10x20:
1072 | if (dig->linkb)
1073 | args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD3;
1074 | else
1075 | args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC2;
1076 | break;
1077 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY20x21:
1078 | if (dig->linkb)
1079 | args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF5;
1080 | else
1081 | args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE4;
1082 | break;
1083 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY30x25:
1084 | args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG6;
1085 | break;
1086 | }
1087 | if (is_dp)
1088 | args.v5.ucLaneNum = dp_lane_count;
1089 | else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1090 | args.v5.ucLaneNum = 8;
1091 | else
1092 | args.v5.ucLaneNum = 4;
1093 | args.v5.ucConnObjId = connector_object_id;
1094 | args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1095 |
1096 | if (is_dp && adev->clock.dp_extclk)
1097 | args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK3;
1098 | else
1099 | args.v5.asConfig.ucPhyClkSrcId = pll_id;
1100 |
1101 | if (is_dp)
1102 | args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1103 | else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT((0x1L << 0x00000003 ) | (0x1L << 0x00000007 ) | ( 0x1L << 0x00000009 ) | (0x1L << 0x0000000A ) | (0x1L << 0x0000000B ) | (0x1L << 0x00000006 )))) {
1104 | if (dig->coherent_mode)
1105 | args.v5.asConfig.ucCoherentMode = 1;
1106 | }
/* HPD pins are 1-based in the table; 0 means "no HPD". */
1107 | if (hpd_id == AMDGPU_HPD_NONE)
1108 | args.v5.asConfig.ucHPDSel = 0;
1109 | else
1110 | args.v5.asConfig.ucHPDSel = hpd_id + 1;
1111 | args.v5.ucDigEncoderSel = 1 << dig_encoder;
1112 | args.v5.ucDPLaneSet = lane_set;
1113 | break;
/* v6: like v5 but with a 32-bit symbol clock; ucDPLaneSet and ucDigMode
 * share storage, selected by the action code. */
1114 | case 6:
1115 | args.v6.ucAction = action;
1116 | if (is_dp)
1117 | args.v6.ulSymClock = cpu_to_le32(dp_clock / 10)((__uint32_t)(dp_clock / 10));
1118 | else
1119 | args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10)((__uint32_t)(amdgpu_encoder->pixel_clock / 10));
1120 |
1121 | switch (amdgpu_encoder->encoder_id) {
1122 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E:
1123 | if (dig->linkb)
1124 | args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB1;
1125 | else
1126 | args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA0;
1127 | break;
1128 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY10x20:
1129 | if (dig->linkb)
1130 | args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD3;
1131 | else
1132 | args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC2;
1133 | break;
1134 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY20x21:
1135 | if (dig->linkb)
1136 | args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF5;
1137 | else
1138 | args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE4;
1139 | break;
1140 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY30x25:
1141 | args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG6;
1142 | break;
1143 | }
1144 | if (is_dp)
1145 | args.v6.ucLaneNum = dp_lane_count;
1146 | else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1147 | args.v6.ucLaneNum = 8;
1148 | else
1149 | args.v6.ucLaneNum = 4;
1150 | args.v6.ucConnObjId = connector_object_id;
1151 | if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH11)
1152 | args.v6.ucDPLaneSet = lane_set;
1153 | else
1154 | args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1155 |
1156 | if (hpd_id == AMDGPU_HPD_NONE)
1157 | args.v6.ucHPDSel = 0;
1158 | else
1159 | args.v6.ucHPDSel = hpd_id + 1;
1160 | args.v6.ucDigEncoderSel = 1 << dig_encoder;
1161 | break;
1162 | default:
1163 | DRM_ERROR("Unknown table version %d, %d\n", frev, crev)__drm_err("Unknown table version %d, %d\n", frev, crev);
1164 | break;
1165 | }
1166 | break;
1167 | default:
1168 | DRM_ERROR("Unknown table version %d, %d\n", frev, crev)__drm_err("Unknown table version %d, %d\n", frev, crev);
1169 | break;
1170 | }
1171 |
1172 | amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1173 | }
1174 | |
/*
 * amdgpu_atombios_encoder_set_edp_panel_power - power an eDP panel on/off
 * via the UNIPHY transmitter control table.
 *
 * @connector: eDP connector whose panel is being switched
 * @action:    ATOM_TRANSMITTER_ACTION_POWER_ON or _POWER_OFF
 *
 * Non-eDP connectors and any other action are treated as a no-op success.
 * For POWER_ON the function polls HPD for up to ~300 ms waiting for the
 * panel to come up and returns false if it never asserts; all other
 * paths return true.
 */
1175 | bool_Bool
1176 | amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1177 | int action)
1178 | {
1179 | struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector)({ const __typeof( ((struct amdgpu_connector *)0)->base ) * __mptr = (connector); (struct amdgpu_connector *)( (char *)__mptr - __builtin_offsetof(struct amdgpu_connector, base) );});
1180 | struct drm_device *dev = amdgpu_connector->base.dev;
1181 | struct amdgpu_device *adev = drm_to_adev(dev);
1182 | union dig_transmitter_control args;
1183 | int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl)(((char*)(&((ATOM_MASTER_LIST_OF_COMMAND_TABLES*)0)->DIG1TransmitterControl )-(char*)0)/sizeof(USHORT));
1184 | uint8_t frev, crev;
1185 |
1186 | if (connector->connector_type != DRM_MODE_CONNECTOR_eDP14)
1187 | goto done;
1188 |
1189 | if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON12) &&
1190 | (action != ATOM_TRANSMITTER_ACTION_POWER_OFF13))
1191 | goto done;
1192 |
1193 | if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1194 | goto done;
1195 |
1196 | memset(&args, 0, sizeof(args))__builtin_memset((&args), (0), (sizeof(args)));
1197 |
1198 | args.v1.ucAction = action;
1199 |
1200 | amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1201 |
1202 | /* wait for the panel to power up */
1203 | if (action == ATOM_TRANSMITTER_ACTION_POWER_ON12) {
1204 | int i;
1205 |
/* Poll HPD once per millisecond, up to 300 tries. */
1206 | for (i = 0; i < 300; i++) {
1207 | if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd)(adev)->mode_info.funcs->hpd_sense((adev), (amdgpu_connector ->hpd.hpd)))
1208 | return true1;
1209 | mdelay(1);
1210 | }
1211 | return false0;
1212 | }
1213 | done:
1214 | return true1;
1215 | }
1216 | |
/*
 * Overlay of the AtomBIOS external encoder control parameter layouts
 * (v1 and v3); one member is filled according to the table's frev/crev.
 */
1217 | union external_encoder_control {
1218 | EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1219 | EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1220 | };
1221 | |
/*
 * amdgpu_atombios_encoder_setup_external_encoder - program an external
 * (e.g. travis/nutmeg style) encoder through the ExternalEncoderControl
 * AtomBIOS command table.
 *
 * @encoder:     the internal DIG encoder driving the external one
 * @ext_encoder: the external encoder object (used for its enum id)
 * @action:      EXTERNAL_ENCODER_ACTION_V3_* / ATOM_ENABLE-style opcode
 *
 * Collects DP clock/lane data from the connector, then fills the v1 or
 * v3 parameter layout depending on the table revision and executes it.
 * Unknown revisions are logged and the call is abandoned.
 */
1222 | static void
1223 | amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1224 | struct drm_encoder *ext_encoder,
1225 | int action)
1226 | {
1227 | struct drm_device *dev = encoder->dev;
1228 | struct amdgpu_device *adev = drm_to_adev(dev);
1229 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );});
1230 | struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (ext_encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof(struct amdgpu_encoder, base) );});
1231 | union external_encoder_control args;
1232 | struct drm_connector *connector;
1233 | int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl)(((char*)(&((ATOM_MASTER_LIST_OF_COMMAND_TABLES*)0)->ExternalEncoderControl )-(char*)0)/sizeof(USHORT));
1234 | u8 frev, crev;
1235 | int dp_clock = 0;
1236 | int dp_lane_count = 0;
1237 | int connector_object_id = 0;
1238 | u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK0x0700) >> ENUM_ID_SHIFT0x08;
1239 |
1240 | if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT0x07)
1241 | connector = amdgpu_get_connector_for_encoder_init(encoder);
1242 | else
1243 | connector = amdgpu_get_connector_for_encoder(encoder);
1244 |
1245 | if (connector) {
1246 | struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector)({ const __typeof( ((struct amdgpu_connector *)0)->base ) * __mptr = (connector); (struct amdgpu_connector *)( (char *)__mptr - __builtin_offsetof(struct amdgpu_connector, base) );});
1247 | struct amdgpu_connector_atom_dig *dig_connector =
1248 | amdgpu_connector->con_priv;
1249 |
1250 | dp_clock = dig_connector->dp_clock;
1251 | dp_lane_count = dig_connector->dp_lane_count;
1252 | connector_object_id =
1253 | (amdgpu_connector->connector_object_id & OBJECT_ID_MASK0x00FF) >> OBJECT_ID_SHIFT0x00;
1254 | }
1255 |
1256 | memset(&args, 0, sizeof(args))__builtin_memset((&args), (0), (sizeof(args)));
1257 |
1258 | if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1259 | return;
1260 |
1261 | switch (frev) {
1262 | case 1:
1263 | /* no params on frev 1 */
1264 | break;
1265 | case 2:
1266 | switch (crev) {
1267 | case 1:
1268 | case 2:
1269 | args.v1.sDigEncoder.ucAction = action;
1270 | args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10)((__uint16_t)(amdgpu_encoder->pixel_clock / 10));
1271 | args.v1.sDigEncoder.ucEncoderMode =
1272 | amdgpu_atombios_encoder_get_encoder_mode(encoder);
1273 |
1274 | if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)(((args.v1.sDigEncoder.ucEncoderMode) == 0) || ((args.v1.sDigEncoder .ucEncoderMode) == 5))) {
1275 | if (dp_clock == 270000)
1276 | args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ0x01;
1277 | args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1278 | } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1279 | args.v1.sDigEncoder.ucLaneNum = 8;
1280 | else
1281 | args.v1.sDigEncoder.ucLaneNum = 4;
1282 | break;
1283 | case 3:
1284 | args.v3.sExtEncoder.ucAction = action;
/* INIT carries the connector object id; other actions carry the clock. */
1285 | if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT0x07)
1286 | args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id)((__uint16_t)(connector_object_id));
1287 | else
1288 | args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10)((__uint16_t)(amdgpu_encoder->pixel_clock / 10));
1289 | args.v3.sExtEncoder.ucEncoderMode =
1290 | amdgpu_atombios_encoder_get_encoder_mode(encoder);
1291 |
1292 | if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)(((args.v3.sExtEncoder.ucEncoderMode) == 0) || ((args.v3.sExtEncoder .ucEncoderMode) == 5))) {
1293 | if (dp_clock == 270000)
1294 | args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ0x01;
1295 | else if (dp_clock == 540000)
1296 | args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ0x02;
1297 | args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1298 | } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1299 | args.v3.sExtEncoder.ucLaneNum = 8;
1300 | else
1301 | args.v3.sExtEncoder.ucLaneNum = 4;
/* Which of (up to) three external encoders this object enum names. */
1302 | switch (ext_enum) {
1303 | case GRAPH_OBJECT_ENUM_ID10x01:
1304 | args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER10x00;
1305 | break;
1306 | case GRAPH_OBJECT_ENUM_ID20x02:
1307 | args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER20x10;
1308 | break;
1309 | case GRAPH_OBJECT_ENUM_ID30x03:
1310 | args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER30x20;
1311 | break;
1312 | }
1313 | args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1314 | break;
1315 | default:
1316 | DRM_ERROR("Unknown table version: %d, %d\n", frev, crev)__drm_err("Unknown table version: %d, %d\n", frev, crev);
1317 | return;
1318 | }
1319 | break;
1320 | default:
1321 | DRM_ERROR("Unknown table version: %d, %d\n", frev, crev)__drm_err("Unknown table version: %d, %d\n", frev, crev);
1322 | return;
1323 | }
1324 | amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1325 | }
1326 | |
/*
 * amdgpu_atombios_encoder_setup_dig - full enable/disable sequence for a
 * DIG encoder: encoder setup, panel mode, transmitter, link training,
 * eDP panel power and LCD backlight, in the hardware-required order.
 *
 * @encoder: DRM encoder being enabled or disabled
 * @action:  ATOM_ENABLE to bring the pipe up, anything else tears it down
 *
 * NOTE(review): in the ATOM_ENABLE path amdgpu_dig_connector->edp_on is
 * written inside an ENCODER_MODE_IS_DP && connector guard, so the
 * pointer is non-NULL there; the same guard protects the disable path.
 */
1327 | static void
1328 | amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1329 | {
1330 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );});
1331 | struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1332 | struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1333 | struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1334 | struct amdgpu_connector *amdgpu_connector = NULL((void *)0);
1335 | struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL((void *)0);
1336 |
1337 | if (connector) {
1338 | amdgpu_connector = to_amdgpu_connector(connector)({ const __typeof( ((struct amdgpu_connector *)0)->base ) * __mptr = (connector); (struct amdgpu_connector *)( (char *)__mptr - __builtin_offsetof(struct amdgpu_connector, base) );});
1339 | amdgpu_dig_connector = amdgpu_connector->con_priv;
1340 | }
1341 |
1342 | if (action == ATOM_ENABLE1) {
/* No connector (e.g. MST-ish external path): default to external DP mode. */
1343 | if (!connector)
1344 | dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE0x00;
1345 | else
1346 | dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1347 |
1348 | /* setup and enable the encoder */
1349 | amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP0x0f, 0);
1350 | amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1351 | ATOM_ENCODER_CMD_SETUP_PANEL_MODE0x10,
1352 | dig->panel_mode);
1353 | if (ext_encoder)
1354 | amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1355 | EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP0x0f);
/* eDP panels must be powered before the transmitter is enabled. */
1356 | if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder))(((amdgpu_atombios_encoder_get_encoder_mode(encoder)) == 0) || ((amdgpu_atombios_encoder_get_encoder_mode(encoder)) == 5)) &&
1357 | connector) {
1358 | if (connector->connector_type == DRM_MODE_CONNECTOR_eDP14) {
1359 | amdgpu_atombios_encoder_set_edp_panel_power(connector,
1360 | ATOM_TRANSMITTER_ACTION_POWER_ON12);
1361 | amdgpu_dig_connector->edp_on = true1;
1362 | }
1363 | }
1364 | /* enable the transmitter */
1365 | amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1366 | ATOM_TRANSMITTER_ACTION_ENABLE1,
1367 | 0, 0);
1368 | if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder))(((amdgpu_atombios_encoder_get_encoder_mode(encoder)) == 0) || ((amdgpu_atombios_encoder_get_encoder_mode(encoder)) == 5)) &&
1369 | connector) {
1370 | /* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1371 | amdgpu_atombios_dp_link_train(encoder, connector);
1372 | amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON0x0d, 0);
1373 | }
1374 | if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT((0x1L << 0x00000001 ) | (0x1L << 0x00000005 ))))
1375 | amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1376 | if (ext_encoder)
1377 | amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE1);
1378 | } else {
/* Disable path: video off, external encoder off, backlight off, sink to
 * D3, transmitter off, then eDP panel power off — reverse of enable. */
1379 | if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder))(((amdgpu_atombios_encoder_get_encoder_mode(encoder)) == 0) || ((amdgpu_atombios_encoder_get_encoder_mode(encoder)) == 5)) &&
1380 | connector)
1381 | amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1382 | ATOM_ENCODER_CMD_DP_VIDEO_OFF0x0c, 0);
1383 | if (ext_encoder)
1384 | amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE0);
1385 | if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT((0x1L << 0x00000001 ) | (0x1L << 0x00000005 ))))
1386 | amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1387 | ATOM_TRANSMITTER_ACTION_LCD_BLOFF2, 0, 0);
1388 |
1389 | if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder))(((amdgpu_atombios_encoder_get_encoder_mode(encoder)) == 0) || ((amdgpu_atombios_encoder_get_encoder_mode(encoder)) == 5)) &&
1390 | connector)
1391 | amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D30x2);
1392 | /* disable the transmitter */
1393 | amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1394 | ATOM_TRANSMITTER_ACTION_DISABLE0, 0, 0);
1395 | if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder))(((amdgpu_atombios_encoder_get_encoder_mode(encoder)) == 0) || ((amdgpu_atombios_encoder_get_encoder_mode(encoder)) == 5)) &&
1396 | connector) {
1397 | if (connector->connector_type == DRM_MODE_CONNECTOR_eDP14) {
1398 | amdgpu_atombios_encoder_set_edp_panel_power(connector,
1399 | ATOM_TRANSMITTER_ACTION_POWER_OFF13);
1400 | amdgpu_dig_connector->edp_on = false0;
1401 | }
1402 | }
1403 | }
1404 | }
1405 | |
1406 | void |
1407 | amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode) |
1408 | { |
1409 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );}); |
1410 | |
1411 | DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",__drm_dbg(DRM_UT_KMS, "encoder dpms %d to mode %d, devices %08x, active_devices %08x\n" , amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices , amdgpu_encoder->active_device) |
1412 | amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,__drm_dbg(DRM_UT_KMS, "encoder dpms %d to mode %d, devices %08x, active_devices %08x\n" , amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices , amdgpu_encoder->active_device) |
1413 | amdgpu_encoder->active_device)__drm_dbg(DRM_UT_KMS, "encoder dpms %d to mode %d, devices %08x, active_devices %08x\n" , amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices , amdgpu_encoder->active_device); |
1414 | switch (amdgpu_encoder->encoder_id) { |
1415 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E: |
1416 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY10x20: |
1417 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY20x21: |
1418 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY30x25: |
1419 | switch (mode) { |
1420 | case DRM_MODE_DPMS_ON0: |
1421 | amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE1); |
1422 | break; |
1423 | case DRM_MODE_DPMS_STANDBY1: |
1424 | case DRM_MODE_DPMS_SUSPEND2: |
1425 | case DRM_MODE_DPMS_OFF3: |
1426 | amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE0); |
1427 | break; |
1428 | } |
1429 | break; |
1430 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO10x14: |
1431 | switch (mode) { |
1432 | case DRM_MODE_DPMS_ON0: |
1433 | amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE1); |
1434 | break; |
1435 | case DRM_MODE_DPMS_STANDBY1: |
1436 | case DRM_MODE_DPMS_SUSPEND2: |
1437 | case DRM_MODE_DPMS_OFF3: |
1438 | amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE0); |
1439 | break; |
1440 | } |
1441 | break; |
1442 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC10x15: |
1443 | switch (mode) { |
1444 | case DRM_MODE_DPMS_ON0: |
1445 | amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE1); |
1446 | break; |
1447 | case DRM_MODE_DPMS_STANDBY1: |
1448 | case DRM_MODE_DPMS_SUSPEND2: |
1449 | case DRM_MODE_DPMS_OFF3: |
1450 | amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE0); |
1451 | break; |
1452 | } |
1453 | break; |
1454 | default: |
1455 | return; |
1456 | } |
1457 | } |
1458 | |
/* Parameter block for the AtomBIOS SelectCRTC_Source command table.
 * Which member is valid depends on the frev/crev reported by
 * amdgpu_atom_parse_cmd_header(); v3 additionally carries ucDstBpc. */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATIONSELECT_CRTC_SOURCE_PARAMETERS v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};
1464 | |
1465 | void |
1466 | amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder) |
1467 | { |
1468 | struct drm_device *dev = encoder->dev; |
1469 | struct amdgpu_device *adev = drm_to_adev(dev); |
1470 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );}); |
1471 | struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc)({ const __typeof( ((struct amdgpu_crtc *)0)->base ) *__mptr = (encoder->crtc); (struct amdgpu_crtc *)( (char *)__mptr - __builtin_offsetof(struct amdgpu_crtc, base) );}); |
1472 | union crtc_source_param args; |
1473 | int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source)(((char*)(&((ATOM_MASTER_LIST_OF_COMMAND_TABLES*)0)->SelectCRTC_Source )-(char*)0)/sizeof(USHORT)); |
1474 | uint8_t frev, crev; |
1475 | struct amdgpu_encoder_atom_dig *dig; |
1476 | |
1477 | memset(&args, 0, sizeof(args))__builtin_memset((&args), (0), (sizeof(args))); |
1478 | |
1479 | if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) |
1480 | return; |
1481 | |
1482 | switch (frev) { |
1483 | case 1: |
1484 | switch (crev) { |
1485 | case 1: |
1486 | default: |
1487 | args.v1.ucCRTC = amdgpu_crtc->crtc_id; |
1488 | switch (amdgpu_encoder->encoder_id) { |
1489 | case ENCODER_OBJECT_ID_INTERNAL_TMDS10x02: |
1490 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS10x13: |
1491 | args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX0x00000003; |
1492 | break; |
1493 | case ENCODER_OBJECT_ID_INTERNAL_LVDS0x01: |
1494 | case ENCODER_OBJECT_ID_INTERNAL_LVTM10x0F: |
1495 | if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT(0x1L << 0x00000001 )) |
1496 | args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX0x00000001; |
1497 | else |
1498 | args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX0x00000009; |
1499 | break; |
1500 | case ENCODER_OBJECT_ID_INTERNAL_DVO10x0B: |
1501 | case ENCODER_OBJECT_ID_INTERNAL_DDI0x19: |
1502 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO10x14: |
1503 | args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX0x00000007; |
1504 | break; |
1505 | case ENCODER_OBJECT_ID_INTERNAL_DAC10x04: |
1506 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC10x15: |
1507 | if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT(0x1L << 0x00000002 ))) |
1508 | args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX0x00000002; |
1509 | else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT(0x1L << 0x00000008 ))) |
1510 | args.v1.ucDevice = ATOM_DEVICE_CV_INDEX0x00000008; |
1511 | else |
1512 | args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX0x00000000; |
1513 | break; |
1514 | case ENCODER_OBJECT_ID_INTERNAL_DAC20x05: |
1515 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC20x16: |
1516 | if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT(0x1L << 0x00000002 ))) |
1517 | args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX0x00000002; |
1518 | else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT(0x1L << 0x00000008 ))) |
1519 | args.v1.ucDevice = ATOM_DEVICE_CV_INDEX0x00000008; |
1520 | else |
1521 | args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX0x00000004; |
1522 | break; |
1523 | } |
1524 | break; |
1525 | case 2: |
1526 | args.v2.ucCRTC = amdgpu_crtc->crtc_id; |
1527 | if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE0x00) { |
1528 | struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); |
1529 | |
1530 | if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS7) |
1531 | args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS1; |
1532 | else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA1) |
1533 | args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT15; |
1534 | else |
1535 | args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); |
1536 | } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT((0x1L << 0x00000001 ) | (0x1L << 0x00000005 )))) { |
1537 | args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS1; |
1538 | } else { |
1539 | args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); |
1540 | } |
1541 | switch (amdgpu_encoder->encoder_id) { |
1542 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E: |
1543 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY10x20: |
1544 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY20x21: |
1545 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY30x25: |
1546 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA0x1F: |
1547 | dig = amdgpu_encoder->enc_priv; |
1548 | switch (dig->dig_encoder) { |
1549 | case 0: |
1550 | args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID0x03; |
1551 | break; |
1552 | case 1: |
1553 | args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID0x09; |
1554 | break; |
1555 | case 2: |
1556 | args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID0x0a; |
1557 | break; |
1558 | case 3: |
1559 | args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID0x0b; |
1560 | break; |
1561 | case 4: |
1562 | args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID0x0c; |
1563 | break; |
1564 | case 5: |
1565 | args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID0x0d; |
1566 | break; |
1567 | case 6: |
1568 | args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID0x0e; |
1569 | break; |
1570 | } |
1571 | break; |
1572 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO10x14: |
1573 | args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID0x07; |
1574 | break; |
1575 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC10x15: |
1576 | if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT(0x1L << 0x00000002 ))) |
1577 | args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID0x02; |
1578 | else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT(0x1L << 0x00000008 ))) |
1579 | args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID0x02; |
1580 | else |
1581 | args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID0x00; |
1582 | break; |
1583 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC20x16: |
1584 | if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT(0x1L << 0x00000002 ))) |
1585 | args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID0x02; |
1586 | else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT(0x1L << 0x00000008 ))) |
1587 | args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID0x02; |
1588 | else |
1589 | args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID0x04; |
1590 | break; |
1591 | } |
1592 | break; |
1593 | case 3: |
1594 | args.v3.ucCRTC = amdgpu_crtc->crtc_id; |
1595 | if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE0x00) { |
1596 | struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); |
1597 | |
1598 | if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS7) |
1599 | args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS1; |
1600 | else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA1) |
1601 | args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT15; |
1602 | else |
1603 | args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); |
1604 | } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT((0x1L << 0x00000001 ) | (0x1L << 0x00000005 )))) { |
1605 | args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS1; |
1606 | } else { |
1607 | args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); |
1608 | } |
1609 | args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder); |
1610 | switch (amdgpu_encoder->encoder_id) { |
1611 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E: |
1612 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY10x20: |
1613 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY20x21: |
1614 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY30x25: |
1615 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA0x1F: |
1616 | dig = amdgpu_encoder->enc_priv; |
1617 | switch (dig->dig_encoder) { |
1618 | case 0: |
1619 | args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID0x03; |
1620 | break; |
1621 | case 1: |
1622 | args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID0x09; |
1623 | break; |
1624 | case 2: |
1625 | args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID0x0a; |
1626 | break; |
1627 | case 3: |
1628 | args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID0x0b; |
1629 | break; |
1630 | case 4: |
1631 | args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID0x0c; |
1632 | break; |
1633 | case 5: |
1634 | args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID0x0d; |
1635 | break; |
1636 | case 6: |
1637 | args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID0x0e; |
1638 | break; |
1639 | } |
1640 | break; |
1641 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO10x14: |
1642 | args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID0x07; |
1643 | break; |
1644 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC10x15: |
1645 | if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT(0x1L << 0x00000002 ))) |
1646 | args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID0x02; |
1647 | else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT(0x1L << 0x00000008 ))) |
1648 | args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID0x02; |
1649 | else |
1650 | args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID0x00; |
1651 | break; |
1652 | case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC20x16: |
1653 | if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT(0x1L << 0x00000002 ))) |
1654 | args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID0x02; |
1655 | else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT(0x1L << 0x00000008 ))) |
1656 | args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID0x02; |
1657 | else |
1658 | args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID0x04; |
1659 | break; |
1660 | } |
1661 | break; |
1662 | } |
1663 | break; |
1664 | default: |
1665 | DRM_ERROR("Unknown table version: %d, %d\n", frev, crev)__drm_err("Unknown table version: %d, %d\n", frev, crev); |
1666 | return; |
1667 | } |
1668 | |
1669 | amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); |
1670 | } |
1671 | |
1672 | /* This only needs to be called once at startup */ |
1673 | void |
1674 | amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev) |
1675 | { |
1676 | struct drm_device *dev = adev_to_drm(adev); |
1677 | struct drm_encoder *encoder; |
1678 | |
1679 | list_for_each_entry(encoder, &dev->mode_config.encoder_list, head)for (encoder = ({ const __typeof( ((__typeof(*encoder) *)0)-> head ) *__mptr = ((&dev->mode_config.encoder_list)-> next); (__typeof(*encoder) *)( (char *)__mptr - __builtin_offsetof (__typeof(*encoder), head) );}); &encoder->head != (& dev->mode_config.encoder_list); encoder = ({ const __typeof ( ((__typeof(*encoder) *)0)->head ) *__mptr = (encoder-> head.next); (__typeof(*encoder) *)( (char *)__mptr - __builtin_offsetof (__typeof(*encoder), head) );})) { |
1680 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );}); |
1681 | struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); |
1682 | |
1683 | switch (amdgpu_encoder->encoder_id) { |
1684 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY0x1E: |
1685 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY10x20: |
1686 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY20x21: |
1687 | case ENCODER_OBJECT_ID_INTERNAL_UNIPHY30x25: |
1688 | amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT7, |
1689 | 0, 0); |
1690 | break; |
1691 | } |
1692 | |
1693 | if (ext_encoder) |
1694 | amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, |
1695 | EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT0x07); |
1696 | } |
1697 | } |
1698 | |
1699 | static bool_Bool |
1700 | amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder, |
1701 | struct drm_connector *connector) |
1702 | { |
1703 | struct drm_device *dev = encoder->dev; |
1704 | struct amdgpu_device *adev = drm_to_adev(dev); |
1705 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );}); |
1706 | struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector)({ const __typeof( ((struct amdgpu_connector *)0)->base ) * __mptr = (connector); (struct amdgpu_connector *)( (char *)__mptr - __builtin_offsetof(struct amdgpu_connector, base) );}); |
1707 | |
1708 | if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT(0x1L << 0x00000002 ) | |
1709 | ATOM_DEVICE_CV_SUPPORT(0x1L << 0x00000008 ) | |
1710 | ATOM_DEVICE_CRT_SUPPORT((0x1L << 0x00000000 ) | (0x1L << 0x00000004 )))) { |
1711 | DAC_LOAD_DETECTION_PS_ALLOCATION args; |
1712 | int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection)(((char*)(&((ATOM_MASTER_LIST_OF_COMMAND_TABLES*)0)->DAC_LoadDetection )-(char*)0)/sizeof(USHORT)); |
1713 | uint8_t frev, crev; |
1714 | |
1715 | memset(&args, 0, sizeof(args))__builtin_memset((&args), (0), (sizeof(args))); |
1716 | |
1717 | if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) |
1718 | return false0; |
1719 | |
1720 | args.sDacload.ucMisc = 0; |
1721 | |
1722 | if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC10x04) || |
1723 | (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC10x15)) |
1724 | args.sDacload.ucDacType = ATOM_DAC_A0; |
1725 | else |
1726 | args.sDacload.ucDacType = ATOM_DAC_B1; |
1727 | |
1728 | if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT(0x1L << 0x00000000 )) |
1729 | args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT)((__uint16_t)((0x1L << 0x00000000 ))); |
1730 | else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT(0x1L << 0x00000004 )) |
1731 | args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT)((__uint16_t)((0x1L << 0x00000004 ))); |
1732 | else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT(0x1L << 0x00000008 )) { |
1733 | args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT)((__uint16_t)((0x1L << 0x00000008 ))); |
1734 | if (crev >= 3) |
1735 | args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb0x01; |
1736 | } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT(0x1L << 0x00000002 )) { |
1737 | args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT)((__uint16_t)((0x1L << 0x00000002 ))); |
1738 | if (crev >= 3) |
1739 | args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb0x01; |
1740 | } |
1741 | |
1742 | amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); |
1743 | |
1744 | return true1; |
1745 | } else |
1746 | return false0; |
1747 | } |
1748 | |
1749 | enum drm_connector_status |
1750 | amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder, |
1751 | struct drm_connector *connector) |
1752 | { |
1753 | struct drm_device *dev = encoder->dev; |
1754 | struct amdgpu_device *adev = drm_to_adev(dev); |
1755 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );}); |
1756 | struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector)({ const __typeof( ((struct amdgpu_connector *)0)->base ) * __mptr = (connector); (struct amdgpu_connector *)( (char *)__mptr - __builtin_offsetof(struct amdgpu_connector, base) );}); |
1757 | uint32_t bios_0_scratch; |
1758 | |
1759 | if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) { |
1760 | DRM_DEBUG_KMS("detect returned false \n")__drm_dbg(DRM_UT_KMS, "detect returned false \n"); |
1761 | return connector_status_unknown; |
1762 | } |
1763 | |
1764 | bios_0_scratch = RREG32(mmBIOS_SCRATCH_0)amdgpu_device_rreg(adev, (0x5c9), 0); |
1765 | |
1766 | DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices)__drm_dbg(DRM_UT_KMS, "Bios 0 scratch %x %08x\n", bios_0_scratch , amdgpu_encoder->devices); |
1767 | if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT(0x1L << 0x00000000 )) { |
1768 | if (bios_0_scratch & ATOM_S0_CRT1_MASK(0x00000001L +0x00000002L)) |
1769 | return connector_status_connected; |
1770 | } |
1771 | if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT(0x1L << 0x00000004 )) { |
1772 | if (bios_0_scratch & ATOM_S0_CRT2_MASK(0x00000100L +0x00000200L)) |
1773 | return connector_status_connected; |
1774 | } |
1775 | if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT(0x1L << 0x00000008 )) { |
1776 | if (bios_0_scratch & (ATOM_S0_CV_MASK(0x00001000L +0x00002000L)|ATOM_S0_CV_MASK_A(0x00000010L +0x00000020L))) |
1777 | return connector_status_connected; |
1778 | } |
1779 | if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT(0x1L << 0x00000002 )) { |
1780 | if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE0x00000400L | ATOM_S0_TV1_COMPOSITE_A0x00000004L)) |
1781 | return connector_status_connected; /* CTV */ |
1782 | else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO0x00000800L | ATOM_S0_TV1_SVIDEO_A0x00000008L)) |
1783 | return connector_status_connected; /* STV */ |
1784 | } |
1785 | return connector_status_disconnected; |
1786 | } |
1787 | |
1788 | enum drm_connector_status |
1789 | amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder, |
1790 | struct drm_connector *connector) |
1791 | { |
1792 | struct drm_device *dev = encoder->dev; |
1793 | struct amdgpu_device *adev = drm_to_adev(dev); |
1794 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );}); |
1795 | struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector)({ const __typeof( ((struct amdgpu_connector *)0)->base ) * __mptr = (connector); (struct amdgpu_connector *)( (char *)__mptr - __builtin_offsetof(struct amdgpu_connector, base) );}); |
1796 | struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); |
1797 | u32 bios_0_scratch; |
1798 | |
1799 | if (!ext_encoder) |
1800 | return connector_status_unknown; |
1801 | |
1802 | if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT((0x1L << 0x00000000 ) | (0x1L << 0x00000004 ))) == 0) |
1803 | return connector_status_unknown; |
1804 | |
1805 | /* load detect on the dp bridge */ |
1806 | amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, |
1807 | EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION0x12); |
1808 | |
1809 | bios_0_scratch = RREG32(mmBIOS_SCRATCH_0)amdgpu_device_rreg(adev, (0x5c9), 0); |
1810 | |
1811 | DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices)__drm_dbg(DRM_UT_KMS, "Bios 0 scratch %x %08x\n", bios_0_scratch , amdgpu_encoder->devices); |
1812 | if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT(0x1L << 0x00000000 )) { |
1813 | if (bios_0_scratch & ATOM_S0_CRT1_MASK(0x00000001L +0x00000002L)) |
1814 | return connector_status_connected; |
1815 | } |
1816 | if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT(0x1L << 0x00000004 )) { |
1817 | if (bios_0_scratch & ATOM_S0_CRT2_MASK(0x00000100L +0x00000200L)) |
1818 | return connector_status_connected; |
1819 | } |
1820 | if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT(0x1L << 0x00000008 )) { |
1821 | if (bios_0_scratch & (ATOM_S0_CV_MASK(0x00001000L +0x00002000L)|ATOM_S0_CV_MASK_A(0x00000010L +0x00000020L))) |
1822 | return connector_status_connected; |
1823 | } |
1824 | if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT(0x1L << 0x00000002 )) { |
1825 | if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE0x00000400L | ATOM_S0_TV1_COMPOSITE_A0x00000004L)) |
1826 | return connector_status_connected; /* CTV */ |
1827 | else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO0x00000800L | ATOM_S0_TV1_SVIDEO_A0x00000008L)) |
1828 | return connector_status_connected; /* STV */ |
1829 | } |
1830 | return connector_status_disconnected; |
1831 | } |
1832 | |
1833 | void |
1834 | amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder) |
1835 | { |
1836 | struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); |
1837 | |
1838 | if (ext_encoder) |
1839 | /* ddc_setup on the dp bridge */ |
1840 | amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, |
1841 | EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP0x14); |
1842 | |
1843 | } |
1844 | |
1845 | void |
1846 | amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector, |
1847 | struct drm_encoder *encoder, |
1848 | bool_Bool connected) |
1849 | { |
1850 | struct drm_device *dev = connector->dev; |
1851 | struct amdgpu_device *adev = drm_to_adev(dev); |
1852 | struct amdgpu_connector *amdgpu_connector = |
1853 | to_amdgpu_connector(connector)({ const __typeof( ((struct amdgpu_connector *)0)->base ) * __mptr = (connector); (struct amdgpu_connector *)( (char *)__mptr - __builtin_offsetof(struct amdgpu_connector, base) );}); |
1854 | struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder)({ const __typeof( ((struct amdgpu_encoder *)0)->base ) *__mptr = (encoder); (struct amdgpu_encoder *)( (char *)__mptr - __builtin_offsetof (struct amdgpu_encoder, base) );}); |
1855 | uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch; |
1856 | |
1857 | bios_0_scratch = RREG32(mmBIOS_SCRATCH_0)amdgpu_device_rreg(adev, (0x5c9), 0); |
1858 | bios_3_scratch = RREG32(mmBIOS_SCRATCH_3)amdgpu_device_rreg(adev, (0x5cc), 0); |
1859 | bios_6_scratch = RREG32(mmBIOS_SCRATCH_6)amdgpu_device_rreg(adev, (0x5cf), 0); |
1860 | |
1861 | if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT(0x1L << 0x00000001 )) && |
1862 | (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT(0x1L << 0x00000001 ))) { |
1863 | if (connected) { |
1864 | DRM_DEBUG_KMS("LCD1 connected\n")__drm_dbg(DRM_UT_KMS, "LCD1 connected\n"); |
1865 | bios_0_scratch |= ATOM_S0_LCD10x00040000L; |
1866 | bios_3_scratch |= ATOM_S3_LCD1_ACTIVE0x00000002L; |
1867 | bios_6_scratch |= ATOM_S6_ACC_REQ_LCD10x00020000L; |
1868 | } else { |
1869 | DRM_DEBUG_KMS("LCD1 disconnected\n")__drm_dbg(DRM_UT_KMS, "LCD1 disconnected\n"); |
1870 | bios_0_scratch &= ~ATOM_S0_LCD10x00040000L; |
1871 | bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE0x00000002L; |
1872 | bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD10x00020000L; |
1873 | } |
1874 | } |
1875 | if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT(0x1L << 0x00000000 )) && |
1876 | (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT(0x1L << 0x00000000 ))) { |
1877 | if (connected) { |
1878 | DRM_DEBUG_KMS("CRT1 connected\n")__drm_dbg(DRM_UT_KMS, "CRT1 connected\n"); |
1879 | bios_0_scratch |= ATOM_S0_CRT1_COLOR0x00000002L; |
1880 | bios_3_scratch |= ATOM_S3_CRT1_ACTIVE0x00000001L; |
1881 | bios_6_scratch |= ATOM_S6_ACC_REQ_CRT10x00010000L; |
1882 | } else { |
1883 | DRM_DEBUG_KMS("CRT1 disconnected\n")__drm_dbg(DRM_UT_KMS, "CRT1 disconnected\n"); |
1884 | bios_0_scratch &= ~ATOM_S0_CRT1_MASK(0x00000001L +0x00000002L); |
1885 | bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE0x00000001L; |
1886 | bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT10x00010000L; |
1887 | } |
1888 | } |
1889 | if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT(0x1L << 0x00000004 )) && |
1890 | (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT(0x1L << 0x00000004 ))) { |
1891 | if (connected) { |
1892 | DRM_DEBUG_KMS("CRT2 connected\n")__drm_dbg(DRM_UT_KMS, "CRT2 connected\n"); |
1893 | bios_0_scratch |= ATOM_S0_CRT2_COLOR0x00000200L; |
1894 | bios_3_scratch |= ATOM_S3_CRT2_ACTIVE0x00000010L; |
1895 | bios_6_scratch |= ATOM_S6_ACC_REQ_CRT20x00100000L; |
1896 | } else { |
1897 | DRM_DEBUG_KMS("CRT2 disconnected\n")__drm_dbg(DRM_UT_KMS, "CRT2 disconnected\n"); |
1898 | bios_0_scratch &= ~ATOM_S0_CRT2_MASK(0x00000100L +0x00000200L); |
1899 | bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE0x00000010L; |
1900 | bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT20x00100000L; |
1901 | } |
1902 | } |
1903 | if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT(0x1L << 0x00000003 )) && |
1904 | (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT(0x1L << 0x00000003 ))) { |
1905 | if (connected) { |
1906 | DRM_DEBUG_KMS("DFP1 connected\n")__drm_dbg(DRM_UT_KMS, "DFP1 connected\n"); |
1907 | bios_0_scratch |= ATOM_S0_DFP10x00010000L; |
1908 | bios_3_scratch |= ATOM_S3_DFP1_ACTIVE0x00000008L; |
1909 | bios_6_scratch |= ATOM_S6_ACC_REQ_DFP10x00080000L; |
1910 | } else { |
1911 | DRM_DEBUG_KMS("DFP1 disconnected\n")__drm_dbg(DRM_UT_KMS, "DFP1 disconnected\n"); |
1912 | bios_0_scratch &= ~ATOM_S0_DFP10x00010000L; |
1913 | bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE0x00000008L; |
1914 | bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP10x00080000L; |
1915 | } |
1916 | } |
1917 | if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT(0x1L << 0x00000007 )) && |
1918 | (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT(0x1L << 0x00000007 ))) { |
1919 | if (connected) { |
1920 | DRM_DEBUG_KMS("DFP2 connected\n")__drm_dbg(DRM_UT_KMS, "DFP2 connected\n"); |
1921 | bios_0_scratch |= ATOM_S0_DFP20x00020000L; |
1922 | bios_3_scratch |= ATOM_S3_DFP2_ACTIVE0x00000080L; |
1923 | bios_6_scratch |= ATOM_S6_ACC_REQ_DFP20x00800000L; |
1924 | } else { |
1925 | DRM_DEBUG_KMS("DFP2 disconnected\n")__drm_dbg(DRM_UT_KMS, "DFP2 disconnected\n"); |
1926 | bios_0_scratch &= ~ATOM_S0_DFP20x00020000L; |
1927 | bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE0x00000080L; |
1928 | bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP20x00800000L; |
1929 | } |
1930 | } |
1931 | if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT(0x1L << 0x00000009 )) && |
1932 | (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT(0x1L << 0x00000009 ))) { |
1933 | if (connected) { |
1934 | DRM_DEBUG_KMS("DFP3 connected\n")__drm_dbg(DRM_UT_KMS, "DFP3 connected\n"); |
1935 | bios_0_scratch |= ATOM_S0_DFP30x00200000L; |
1936 | bios_3_scratch |= ATOM_S3_DFP3_ACTIVE0x00000200L; |
1937 | bios_6_scratch |= ATOM_S6_ACC_REQ_DFP30x02000000L; |
1938 | } else { |
1939 | DRM_DEBUG_KMS("DFP3 disconnected\n")__drm_dbg(DRM_UT_KMS, "DFP3 disconnected\n"); |
1940 | bios_0_scratch &= ~ATOM_S0_DFP30x00200000L; |
1941 | bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE0x00000200L; |
1942 | bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP30x02000000L; |
1943 | } |
1944 | } |
1945 | if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT(0x1L << 0x0000000A )) && |
1946 | (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT(0x1L << 0x0000000A ))) { |
1947 | if (connected) { |
1948 | DRM_DEBUG_KMS("DFP4 connected\n")__drm_dbg(DRM_UT_KMS, "DFP4 connected\n"); |
1949 | bios_0_scratch |= ATOM_S0_DFP40x00400000L; |
1950 | bios_3_scratch |= ATOM_S3_DFP4_ACTIVE0x00000400L; |
1951 | bios_6_scratch |= ATOM_S6_ACC_REQ_DFP40x04000000L; |
1952 | } else { |
1953 | DRM_DEBUG_KMS("DFP4 disconnected\n")__drm_dbg(DRM_UT_KMS, "DFP4 disconnected\n"); |
1954 | bios_0_scratch &= ~ATOM_S0_DFP40x00400000L; |
1955 | bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE0x00000400L; |
1956 | bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP40x04000000L; |
1957 | } |
1958 | } |
1959 | if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT(0x1L << 0x0000000B )) && |
1960 | (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT(0x1L << 0x0000000B ))) { |
1961 | if (connected) { |
1962 | DRM_DEBUG_KMS("DFP5 connected\n")__drm_dbg(DRM_UT_KMS, "DFP5 connected\n"); |
1963 | bios_0_scratch |= ATOM_S0_DFP50x00800000L; |
1964 | bios_3_scratch |= ATOM_S3_DFP5_ACTIVE0x00000800L; |
1965 | bios_6_scratch |= ATOM_S6_ACC_REQ_DFP50x08000000L; |
1966 | } else { |
1967 | DRM_DEBUG_KMS("DFP5 disconnected\n")__drm_dbg(DRM_UT_KMS, "DFP5 disconnected\n"); |
1968 | bios_0_scratch &= ~ATOM_S0_DFP50x00800000L; |
1969 | bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE0x00000800L; |
1970 | bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP50x08000000L; |
1971 | } |
1972 | } |
1973 | if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT(0x1L << 0x00000006 )) && |
1974 | (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT(0x1L << 0x00000006 ))) { |
1975 | if (connected) { |
1976 | DRM_DEBUG_KMS("DFP6 connected\n")__drm_dbg(DRM_UT_KMS, "DFP6 connected\n"); |
1977 | bios_0_scratch |= ATOM_S0_DFP60x00100000L; |
1978 | bios_3_scratch |= ATOM_S3_DFP6_ACTIVE0x00000040L; |
1979 | bios_6_scratch |= ATOM_S6_ACC_REQ_DFP60x00400000L; |
1980 | } else { |
1981 | DRM_DEBUG_KMS("DFP6 disconnected\n")__drm_dbg(DRM_UT_KMS, "DFP6 disconnected\n"); |
1982 | bios_0_scratch &= ~ATOM_S0_DFP60x00100000L; |
1983 | bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE0x00000040L; |
1984 | bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP60x00400000L; |
1985 | } |
1986 | } |
1987 | |
1988 | WREG32(mmBIOS_SCRATCH_0, bios_0_scratch)amdgpu_device_wreg(adev, (0x5c9), (bios_0_scratch), 0); |
1989 | WREG32(mmBIOS_SCRATCH_3, bios_3_scratch)amdgpu_device_wreg(adev, (0x5cc), (bios_3_scratch), 0); |
1990 | WREG32(mmBIOS_SCRATCH_6, bios_6_scratch)amdgpu_device_wreg(adev, (0x5cf), (bios_6_scratch), 0); |
1991 | } |
1992 | |
/* Overlay of the two known revisions of the AtomBIOS LVDS info data
 * table; the revision in use is reported by amdgpu_atom_parse_data_header(). */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};
1997 | |
1998 | struct amdgpu_encoder_atom_dig * |
1999 | amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder) |
2000 | { |
2001 | struct drm_device *dev = encoder->base.dev; |
2002 | struct amdgpu_device *adev = drm_to_adev(dev); |
2003 | struct amdgpu_mode_info *mode_info = &adev->mode_info; |
2004 | int index = GetIndexIntoMasterTable(DATA, LVDS_Info)(((char*)(&((ATOM_MASTER_LIST_OF_DATA_TABLES*)0)->LCD_Info )-(char*)0)/sizeof(USHORT)); |
2005 | uint16_t data_offset, misc; |
2006 | union lvds_info *lvds_info; |
2007 | uint8_t frev, crev; |
2008 | struct amdgpu_encoder_atom_dig *lvds = NULL((void *)0); |
2009 | int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK0x0700) >> ENUM_ID_SHIFT0x08; |
2010 | |
2011 | if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL((void *)0), |
2012 | &frev, &crev, &data_offset)) { |
2013 | lvds_info = |
2014 | (union lvds_info *)(mode_info->atom_context->bios + data_offset); |
2015 | lvds = |
2016 | kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL(0x0001 | 0x0004)); |
2017 | |
2018 | if (!lvds) |
2019 | return NULL((void *)0); |
2020 | |
2021 | lvds->native_mode.clock = |
2022 | le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk)((__uint16_t)(lvds_info->info.sLCDTiming.usPixClk)) * 10; |
2023 | lvds->native_mode.hdisplay = |
2024 | le16_to_cpu(lvds_info->info.sLCDTiming.usHActive)((__uint16_t)(lvds_info->info.sLCDTiming.usHActive)); |
2025 | lvds->native_mode.vdisplay = |
2026 | le16_to_cpu(lvds_info->info.sLCDTiming.usVActive)((__uint16_t)(lvds_info->info.sLCDTiming.usVActive)); |
2027 | lvds->native_mode.htotal = lvds->native_mode.hdisplay + |
2028 | le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time)((__uint16_t)(lvds_info->info.sLCDTiming.usHBlanking_Time) ); |
2029 | lvds->native_mode.hsync_start = lvds->native_mode.hdisplay + |
2030 | le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset)((__uint16_t)(lvds_info->info.sLCDTiming.usHSyncOffset)); |
2031 | lvds->native_mode.hsync_end = lvds->native_mode.hsync_start + |
2032 | le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth)((__uint16_t)(lvds_info->info.sLCDTiming.usHSyncWidth)); |
2033 | lvds->native_mode.vtotal = lvds->native_mode.vdisplay + |
2034 | le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time)((__uint16_t)(lvds_info->info.sLCDTiming.usVBlanking_Time) ); |
2035 | lvds->native_mode.vsync_start = lvds->native_mode.vdisplay + |
2036 | le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset)((__uint16_t)(lvds_info->info.sLCDTiming.usVSyncOffset)); |
2037 | lvds->native_mode.vsync_end = lvds->native_mode.vsync_start + |
2038 | le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth)((__uint16_t)(lvds_info->info.sLCDTiming.usVSyncWidth)); |
2039 | lvds->panel_pwr_delay = |
2040 | le16_to_cpu(lvds_info->info.usOffDelayInMs)((__uint16_t)(lvds_info->info.usOffDelayInMs)); |
2041 | lvds->lcd_misc = lvds_info->info.ucLVDS_Misc; |
2042 | |
2043 | misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess)((__uint16_t)(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess )); |
2044 | if (misc & ATOM_VSYNC_POLARITY0x04) |
2045 | lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC(1<<3); |
2046 | if (misc & ATOM_HSYNC_POLARITY0x02) |
2047 | lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC(1<<1); |
2048 | if (misc & ATOM_COMPOSITESYNC0x40) |
2049 | lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC(1<<6); |
2050 | if (misc & ATOM_INTERLACE0x80) |
2051 | lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE(1<<4); |
2052 | if (misc & ATOM_DOUBLE_CLOCK_MODE0x100) |
2053 | lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN(1<<5); |
2054 | |
2055 | lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize)((__uint16_t)(lvds_info->info.sLCDTiming.usImageHSize)); |
2056 | lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize)((__uint16_t)(lvds_info->info.sLCDTiming.usImageVSize)); |
2057 | |
2058 | /* set crtc values */ |
2059 | drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V(1 << 0)); |
2060 | |
2061 | lvds->lcd_ss_id = lvds_info->info.ucSS_Id; |
2062 | |
2063 | encoder->native_mode = lvds->native_mode; |
2064 | |
2065 | if (encoder_enum == 2) |
2066 | lvds->linkb = true1; |
2067 | else |
2068 | lvds->linkb = false0; |
2069 | |
2070 | /* parse the lcd record table */ |
2071 | if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)((__uint16_t)(lvds_info->info.usModePatchTableOffset))) { |
2072 | ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record; |
2073 | ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record; |
2074 | bool_Bool bad_record = false0; |
2075 | u8 *record; |
2076 | |
2077 | if ((frev == 1) && (crev < 2)) |
2078 | /* absolute */ |
2079 | record = (u8 *)(mode_info->atom_context->bios + |
2080 | le16_to_cpu(lvds_info->info.usModePatchTableOffset)((__uint16_t)(lvds_info->info.usModePatchTableOffset))); |
2081 | else |
2082 | /* relative */ |
2083 | record = (u8 *)(mode_info->atom_context->bios + |
2084 | data_offset + |
2085 | le16_to_cpu(lvds_info->info.usModePatchTableOffset)((__uint16_t)(lvds_info->info.usModePatchTableOffset))); |
2086 | while (*record != ATOM_RECORD_END_TYPE0xFF) { |
2087 | switch (*record) { |
2088 | case LCD_MODE_PATCH_RECORD_MODE_TYPE1: |
2089 | record += sizeof(ATOM_PATCH_RECORD_MODE); |
2090 | break; |
2091 | case LCD_RTS_RECORD_TYPE2: |
2092 | record += sizeof(ATOM_LCD_RTS_RECORD); |
2093 | break; |
2094 | case LCD_CAP_RECORD_TYPE3: |
2095 | record += sizeof(ATOM_LCD_MODE_CONTROL_CAP); |
2096 | break; |
2097 | case LCD_FAKE_EDID_PATCH_RECORD_TYPE4: |
2098 | fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record; |
2099 | if (fake_edid_record->ucFakeEDIDLength) { |
2100 | struct edid *edid; |
2101 | int edid_size = |
2102 | max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength)((((int)128)>((int)fake_edid_record->ucFakeEDIDLength)) ?((int)128):((int)fake_edid_record->ucFakeEDIDLength)); |
2103 | edid = kmalloc(edid_size, GFP_KERNEL(0x0001 | 0x0004)); |
2104 | if (edid) { |
2105 | memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],__builtin_memcpy(((u8 *)edid), ((u8 *)&fake_edid_record-> ucFakeEDIDString[0]), (fake_edid_record->ucFakeEDIDLength) ) |
2106 | fake_edid_record->ucFakeEDIDLength)__builtin_memcpy(((u8 *)edid), ((u8 *)&fake_edid_record-> ucFakeEDIDString[0]), (fake_edid_record->ucFakeEDIDLength) ); |
2107 | |
2108 | if (drm_edid_is_valid(edid)) { |
2109 | adev->mode_info.bios_hardcoded_edid = edid; |
2110 | adev->mode_info.bios_hardcoded_edid_size = edid_size; |
2111 | } else |
2112 | kfree(edid); |
2113 | } |
2114 | } |
2115 | record += fake_edid_record->ucFakeEDIDLength ? |
2116 | fake_edid_record->ucFakeEDIDLength + 2 : |
2117 | sizeof(ATOM_FAKE_EDID_PATCH_RECORD); |
2118 | break; |
2119 | case LCD_PANEL_RESOLUTION_RECORD_TYPE5: |
2120 | panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record; |
2121 | lvds->native_mode.width_mm = panel_res_record->usHSize; |
2122 | lvds->native_mode.height_mm = panel_res_record->usVSize; |
2123 | record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD); |
2124 | break; |
2125 | default: |
2126 | DRM_ERROR("Bad LCD record %d\n", *record)__drm_err("Bad LCD record %d\n", *record); |
2127 | bad_record = true1; |
2128 | break; |
2129 | } |
2130 | if (bad_record) |
2131 | break; |
2132 | } |
2133 | } |
2134 | } |
2135 | return lvds; |
2136 | } |
2137 | |
2138 | struct amdgpu_encoder_atom_dig * |
2139 | amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder) |
2140 | { |
2141 | int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK0x0700) >> ENUM_ID_SHIFT0x08; |
2142 | struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL(0x0001 | 0x0004)); |
2143 | |
2144 | if (!dig) |
2145 | return NULL((void *)0); |
2146 | |
2147 | /* coherent mode by default */ |
2148 | dig->coherent_mode = true1; |
2149 | dig->dig_encoder = -1; |
2150 | |
2151 | if (encoder_enum == 2) |
2152 | dig->linkb = true1; |
2153 | else |
2154 | dig->linkb = false0; |
2155 | |
2156 | return dig; |
2157 | } |
2158 |