I have a laptop with a GTX 980M and an AUO109B UHD panel.
When I change the resolution from the native 3840x2160 to 1920x1080 (exactly half), I get blurred fonts. In the control panel only the GPU scaling mode is available. Adding a custom full-HD resolution with CRU does not activate Display scaling. When I check (set) some established resolution in CRU, Display scaling appears, but setting the full-HD resolution makes the screen go black.
What parameters might activate Display scaling? Or do I need to modify the NVIDIA driver to set its scaling filter to pixel-perfect?
You can't do display scaling with a laptop because a laptop display doesn't have a scaler. I don't think there's a way to make the GPU scale with pixel doubling. If this is for desktop use, you'd be better off using 3840x2160 and setting the Windows DPI scaling to 200%.
With this simple code (using NVAPI) we can switch between scaling modes.
Code:
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <tchar.h>
#include <windows.h>
#include "nvapi.h"
/*
 * Toggle the NVIDIA scaling mode of the first display path between
 * GPU scaling-to-native and GPU scanout-to-native, using the standard
 * three-pass NvAPI_DISP_GetDisplayConfig protocol:
 *   pass 1: query the path count,
 *   pass 2: query per-path target counts,
 *   pass 3: retrieve the full path data.
 *
 * Returns 0 on success, 1 on any failure (the failing NvAPI status is
 * printed). The original mixed raw NvAPI_Status values and 1 as exit
 * codes, leaked every allocation on error paths, and dereferenced one
 * unchecked malloc; all fixed here via goto-based cleanup.
 */
int _tmain(int argc, _TCHAR* argv[])
{
    NvAPI_Status ret = NVAPI_OK;
    NvU32 pathCount = 0;
    NV_DISPLAYCONFIG_PATH_INFO *pathInfo = NULL;
    NvU32 i, j;
    int rc = 1; /* pessimistic: flipped to 0 only after SetDisplayConfig succeeds */

    (void)argc;
    (void)argv;

    ret = NvAPI_Initialize();
    if (ret != NVAPI_OK)
    {
        printf("NvAPI_Initialize() failed = 0x%x", ret);
        return 1; /* nothing to clean up yet */
    }

    /* Pass 1: how many display paths exist? */
    ret = NvAPI_DISP_GetDisplayConfig(&pathCount, NULL);
    if (ret != NVAPI_OK)
    {
        printf("NvAPI_DISP_GetDisplayConfig(count) failed = 0x%x", ret);
        goto done;
    }
    if (pathCount == 0)
    {
        printf("No active display paths found.");
        goto done;
    }

    /* calloc zero-initializes and checks count*size for overflow. */
    pathInfo = calloc(pathCount, sizeof *pathInfo);
    if (pathInfo == NULL)
    {
        printf("Out of memory.");
        goto done;
    }

    for (i = 0; i < pathCount; i++)
        pathInfo[i].version = NV_DISPLAYCONFIG_PATH_INFO_VER;

    /* Pass 2: fill in targetInfoCount for every path. */
    ret = NvAPI_DISP_GetDisplayConfig(&pathCount, pathInfo);
    if (ret != NVAPI_OK)
    {
        printf("NvAPI_DISP_GetDisplayConfig(counts) failed = 0x%x", ret);
        goto done;
    }

    for (i = 0; i < pathCount; i++)
    {
        /* Per-path source mode info. */
        pathInfo[i].sourceModeInfo = calloc(1, sizeof *pathInfo[i].sourceModeInfo);
        if (pathInfo[i].sourceModeInfo == NULL)
        {
            printf("Out of memory.");
            goto done;
        }

        /* Target array plus per-target advanced details. */
        pathInfo[i].targetInfo =
            calloc(pathInfo[i].targetInfoCount, sizeof *pathInfo[i].targetInfo);
        if (pathInfo[i].targetInfo == NULL)
        {
            printf("Out of memory.");
            goto done;
        }

        for (j = 0; j < pathInfo[i].targetInfoCount; j++)
        {
            pathInfo[i].targetInfo[j].details =
                calloc(1, sizeof *pathInfo[i].targetInfo[j].details);
            /* The original never checked this allocation before writing to it. */
            if (pathInfo[i].targetInfo[j].details == NULL)
            {
                printf("Out of memory.");
                goto done;
            }
            pathInfo[i].targetInfo[j].details->version =
                NV_DISPLAYCONFIG_PATH_ADVANCED_TARGET_INFO_VER;
        }
    }

    /* Pass 3: retrieve the complete path information. */
    ret = NvAPI_DISP_GetDisplayConfig(&pathCount, pathInfo);
    if (ret != NVAPI_OK)
    {
        printf("NvAPI_DISP_GetDisplayConfig(full) failed = 0x%x", ret);
        goto done;
    }

    /* Guard the [0][0] access the original performed blindly. */
    if (pathInfo[0].targetInfoCount == 0)
    {
        printf("First display path has no targets.");
        goto done;
    }

    /* Swap the scaling mode of the first target of the first path. */
    pathInfo[0].targetInfo[0].details->scaling =
        (pathInfo[0].targetInfo[0].details->scaling == NV_SCALING_GPU_SCANOUT_TO_NATIVE)
            ? NV_SCALING_GPU_SCALING_TO_NATIVE
            : NV_SCALING_GPU_SCANOUT_TO_NATIVE;

    ret = NvAPI_DISP_SetDisplayConfig(1, pathInfo, NV_DISPLAYCONFIG_DRIVER_RELOAD_ALLOWED);
    if (ret != NVAPI_OK)
    {
        printf("Failed to Set Mode... return code = 0x%x", ret);
        goto done;
    }

    rc = 0;
    printf("\nPress any key to exit...");
    getchar();

done:
    /* Release everything allocated above; the original leaked on every
     * error path. free(NULL) is a no-op, so partially-built paths are safe. */
    if (pathInfo != NULL)
    {
        for (i = 0; i < pathCount; i++)
        {
            if (pathInfo[i].targetInfo != NULL)
            {
                for (j = 0; j < pathInfo[i].targetInfoCount; j++)
                    free(pathInfo[i].targetInfo[j].details);
                free(pathInfo[i].targetInfo);
            }
            free(pathInfo[i].sourceModeInfo);
        }
        free(pathInfo);
    }
    return rc;
}
Maybe someone can help dig deeper into the NvAPI_DISP_SetDisplayConfig function? I suppose the scaling modes are hidden inside the driver (not in DWM), like these hidden entry points:
NvAPI_D3D10_ProcessCallbacks
NvAPI_D3D10_GetBufferStreamOutBytesWritten
NvAPI_D3D10_SetBufferStreamOutBytesWritten
I also found an old petition:
https://www.change.org/p/nvidia-allow-di...pu-drivers
It is 3 years old.