/*
 * Per-fuse-corner characterization data from the SoC descriptor:
 * voltage bounds plus scale/adjust factors applied to the raw fused
 * quotient and quotient-offset values.
 */
struct fuse_corner_data {
	int ref_uV;		/* characterization reference voltage */
	int max_uV;		/* ceiling voltage for this fuse corner */
	int min_uV;		/* floor voltage for this fuse corner */
	int max_volt_scale;	/* presumably uV/MHz scaling limit — see interpolation math */
	int max_quot_scale;
	/* fuse quot */
	int quot_offset;
	int quot_scale;
	int quot_adjust;
	/* fuse quot_offset */
	int quot_offset_scale;
	int quot_offset_adjust;
};
/*
 * Top-level fuse description: initial-voltage fuse encoding parameters
 * and the per-fuse-corner characterization table.
 */
struct cpr_fuses {
	int init_voltage_step;	/* step size (uV) of the fused init voltage field */
	int init_voltage_width;	/* bit width of the fused init voltage field */
	struct fuse_corner_data *fuse_corner_data;	/* array, one entry per fuse corner */
};
struct fuse_corner { int min_uV; int max_uV; int uV; int quot; int step_quot; conststruct reg_sequence *accs; int num_accs; unsignedlong max_freq;
u8 ring_osc_idx;
};
/*
 * Runtime state for one virtual (OPP-level) corner.
 *
 * Fix: the original had the fused token "unsignedlong", which does not
 * compile; restored to "unsigned long".
 */
struct corner {
	int min_uV;		/* floor voltage */
	int max_uV;		/* ceiling voltage */
	int uV;			/* initial open-loop voltage */
	int last_uV;		/* last voltage settled on by the closed loop */
	int quot_adjust;	/* quotient adjustment derived from frequency scaling */
	u32 save_ctl;		/* saved RBCPR_CTL value restored on corner switch */
	u32 save_irq;		/* saved interrupt mask restored on corner switch */
	unsigned long freq;	/* OPP frequency for this corner */
	struct fuse_corner *fuse_corner;	/* fuse corner this virtual corner maps to */
};
if (drv->tcsr && dir == UP)
cpr_set_acc(drv->tcsr, prev_fuse_corner, fuse_corner);
return 0;
}
staticint cpr_scale_voltage(struct cpr_drv *drv, struct corner *corner, int new_uV, enum voltage_change_dir dir)
{ int ret; struct fuse_corner *fuse_corner = corner->fuse_corner;
ret = cpr_pre_voltage(drv, fuse_corner, dir); if (ret) return ret;
ret = regulator_set_voltage(drv->vdd_apc, new_uV, new_uV); if (ret) {
dev_err_ratelimited(drv->dev, "failed to set apc voltage %d\n",
new_uV); return ret;
}
ret = cpr_post_voltage(drv, fuse_corner, dir); if (ret) return ret;
if (dir == UP) { if (desc->clamp_timer_interval &&
error_steps < desc->up_threshold) { /* * Handle the case where another measurement started * after the interrupt was triggered due to a core * exiting from power collapse.
*/
error_steps = max(desc->up_threshold,
desc->vdd_apc_step_up_limit);
}
if (last_uV >= corner->max_uV) {
cpr_irq_clr_nack(drv);
/* Maximize the UP threshold */
reg_mask = RBCPR_CTL_UP_THRESHOLD_MASK;
reg_mask <<= RBCPR_CTL_UP_THRESHOLD_SHIFT;
val = reg_mask;
cpr_ctl_modify(drv, reg_mask, val);
/* Disable UP interrupt */
cpr_irq_set(drv, CPR_INT_DEFAULT & ~CPR_INT_UP);
return 0;
}
if (error_steps > desc->vdd_apc_step_up_limit)
error_steps = desc->vdd_apc_step_up_limit;
/* Calculate new voltage */
new_uV = last_uV + error_steps * step_uV;
new_uV = min(new_uV, corner->max_uV);
dev_dbg(drv->dev, "UP: -> new_uV: %d last_uV: %d perf state: %u\n",
new_uV, last_uV, cpr_get_cur_perf_state(drv));
} else { if (desc->clamp_timer_interval &&
error_steps < desc->down_threshold) { /* * Handle the case where another measurement started * after the interrupt was triggered due to a core * exiting from power collapse.
*/
error_steps = max(desc->down_threshold,
desc->vdd_apc_step_down_limit);
}
if (last_uV <= corner->min_uV) {
cpr_irq_clr_nack(drv);
/* Enable auto nack down */
reg_mask = RBCPR_CTL_SW_AUTO_CONT_NACK_DN_EN;
val = RBCPR_CTL_SW_AUTO_CONT_NACK_DN_EN;
cpr_ctl_modify(drv, reg_mask, val);
/* Disable DOWN interrupt */
cpr_irq_set(drv, CPR_INT_DEFAULT & ~CPR_INT_DOWN);
return 0;
}
if (error_steps > desc->vdd_apc_step_down_limit)
error_steps = desc->vdd_apc_step_down_limit;
/* Calculate new voltage */
new_uV = last_uV - error_steps * step_uV;
new_uV = max(new_uV, corner->min_uV);
ret = cpr_scale_voltage(drv, corner, new_uV, dir); if (ret) {
cpr_irq_clr_nack(drv); return ret;
}
drv->corner->last_uV = new_uV;
if (dir == UP) { /* Disable auto nack down */
reg_mask = RBCPR_CTL_SW_AUTO_CONT_NACK_DN_EN;
val = 0;
} else { /* Restore default threshold for UP */
reg_mask = RBCPR_CTL_UP_THRESHOLD_MASK;
reg_mask <<= RBCPR_CTL_UP_THRESHOLD_SHIFT;
val = desc->up_threshold;
val <<= RBCPR_CTL_UP_THRESHOLD_SHIFT;
}
/* Program the default HW ceiling, floor and vlevel */
val = (RBIF_LIMIT_CEILING_DEFAULT & RBIF_LIMIT_CEILING_MASK)
<< RBIF_LIMIT_CEILING_SHIFT;
val |= RBIF_LIMIT_FLOOR_DEFAULT & RBIF_LIMIT_FLOOR_MASK;
cpr_write(drv, REG_RBIF_LIMIT, val);
cpr_write(drv, REG_RBIF_SW_VLEVEL, RBIF_SW_VLEVEL_DEFAULT);
/* * Clear the target quotient value and gate count of all * ring oscillators
*/ for (i = 0; i < CPR_NUM_RING_OSC; i++)
cpr_write(drv, REG_RBCPR_GCNT_TARGET(i), 0);
/* Program the delay count for the timer */
val = (drv->ref_clk_khz * desc->timer_delay_us) / 1000;
cpr_write(drv, REG_RBCPR_TIMER_INTERVAL, val);
dev_dbg(drv->dev, "Timer count: %#0x (for %d us)\n", val,
desc->timer_delay_us);
/* Program Consecutive Up & Down */
val = desc->timer_cons_down << RBIF_TIMER_ADJ_CONS_DOWN_SHIFT;
val |= desc->timer_cons_up << RBIF_TIMER_ADJ_CONS_UP_SHIFT;
val |= desc->clamp_timer_interval << RBIF_TIMER_ADJ_CLAMP_INT_SHIFT;
cpr_write(drv, REG_RBIF_TIMER_ADJUST, val);
/* Program the control register */
val = desc->up_threshold << RBCPR_CTL_UP_THRESHOLD_SHIFT;
val |= desc->down_threshold << RBCPR_CTL_DN_THRESHOLD_SHIFT;
val |= RBCPR_CTL_TIMER_EN | RBCPR_CTL_COUNT_MODE;
val |= RBCPR_CTL_SW_AUTO_CONT_ACK_EN;
cpr_write(drv, REG_RBCPR_CTL, val);
for (i = 0; i < drv->num_corners; i++) {
corner = &drv->corners[i];
corner->save_ctl = val;
corner->save_irq = CPR_INT_DEFAULT;
}
cpr_irq_set(drv, CPR_INT_DEFAULT);
val = cpr_read(drv, REG_RBCPR_VERSION); if (val <= RBCPR_VER_2)
drv->flags |= FLAGS_IGNORE_1ST_IRQ_STATUS;
/* * Determine new corner we're going to. * Remove one since lowest performance state is 1.
*/
corner = drv->corners + state - 1;
end = &drv->corners[drv->num_corners - 1]; if (corner > end || corner < drv->corners) return -EINVAL;
/* Determine direction */ if (drv->corner > corner)
dir = DOWN; elseif (drv->corner < corner)
dir = UP; else
dir = NO_CHANGE;
if (cpr_is_allowed(drv))
new_uV = corner->last_uV; else
new_uV = corner->uV;
if (cpr_is_allowed(drv))
cpr_ctl_disable(drv);
ret = cpr_scale_voltage(drv, corner, new_uV, dir); if (ret) return ret;
if (cpr_is_allowed(drv)) {
cpr_irq_clr(drv); if (drv->corner != corner)
cpr_corner_restore(drv, corner);
cpr_ctl_enable(drv, corner);
}
step_volt = regulator_get_linear_step(drv->vdd_apc); if (!step_volt) return -EINVAL;
/* Populate fuse_corner members */
fuse = drv->fuse_corners;
end = &fuse[desc->num_fuse_corners - 1];
fdata = desc->cpr_fuses.fuse_corner_data;
for (i = 0; fuse <= end; fuse++, fuses++, i++, fdata++) { /* * Update SoC voltages: platforms might choose a different * regulators than the one used to characterize the algorithms * (ie, init_voltage_step).
*/
fdata->min_uV = roundup(fdata->min_uV, step_volt);
fdata->max_uV = roundup(fdata->max_uV, step_volt);
if (fuse == end) { /* * Allow the highest fuse corner's PVS voltage to * define the ceiling voltage for that corner in order * to support SoC's in which variable ceiling values * are required.
*/
end->max_uV = max(end->max_uV, end->uV);
}
/* Populate target quotient by scaling */
ret = nvmem_cell_read_variable_le_u32(drv->dev, fuses->quotient, &fuse->quot); if (ret) return ret;
/* * Don't interpolate in the wrong direction. This could happen * if the adjusted fuse voltage overlaps with the previous fuse's * adjusted voltage.
*/ if (f_high <= f_low || uV_high <= uV_low || f_high <= corner->freq) return corner->uV;
/* * max_volt_scale has units of uV/MHz while freq values * have units of Hz. Divide by 1000000 to convert to.
*/
temp_limit = f_diff * fdata->max_volt_scale;
do_div(temp_limit, 1000000);
/* * Store maximum frequency for each fuse corner based on the frequency * plan
*/ for (level = 1; level <= drv->num_corners; level++) {
opp = dev_pm_opp_find_level_exact(&drv->pd.dev, level); if (IS_ERR(opp)) return -EINVAL;
fc = cpr_get_fuse_corner(opp); if (!fc) {
dev_pm_opp_put(opp); return -EINVAL;
}
fnum = fc - 1;
freq = cpr_get_opp_hz_for_req(opp, drv->attached_cpu_dev); if (!freq) {
dev_pm_opp_put(opp); return -EINVAL;
}
cdata[level - 1].fuse_corner = fnum;
cdata[level - 1].freq = freq;
/* * Get the quotient adjustment scaling factor, according to: * * scaling = min(1000 * (QUOT(corner_N) - QUOT(corner_N-1)) * / (freq(corner_N) - freq(corner_N-1)), max_factor) * * QUOT(corner_N): quotient read from fuse for fuse corner N * QUOT(corner_N-1): quotient read from fuse for fuse corner (N - 1) * freq(corner_N): max frequency in MHz supported by fuse corner N * freq(corner_N-1): max frequency in MHz supported by fuse corner * (N - 1) * * Then walk through the corners mapped to each fuse corner * and calculate the quotient adjustment for each one using the * following formula: * * quot_adjust = (freq_max - freq_corner) * scaling / 1000 * * freq_max: max frequency in MHz supported by the fuse corner * freq_corner: frequency in MHz corresponding to the corner * scaling: calculated from above equation * * * + + * | v | * q | f c o | f c * u | c l | c * o | f t | f * t | c a | c * | c f g | c f * | e | * +--------------- +---------------- * 0 1 2 3 4 5 6 0 1 2 3 4 5 6 * corner corner * * c = corner * f = fuse corner *
*/ for (apply_scaling = false, i = 0; corner <= end; corner++, i++) {
fnum = cdata[i].fuse_corner;
fdata = &desc->cpr_fuses.fuse_corner_data[fnum];
quot_offset = fuses[fnum].quotient_offset;
fuse = &drv->fuse_corners[fnum]; if (fnum)
prev_fuse = &drv->fuse_corners[fnum - 1]; else
prev_fuse = NULL;
if (!drv->cpu_clk) {
dev_err(drv->dev, "cannot get rate from NULL clk\n"); return -EINVAL;
}
end = &drv->corners[drv->num_corners - 1];
rate = clk_get_rate(drv->cpu_clk);
/* * Some bootloaders set a CPU clock frequency that is not defined * in the OPP table. When running at an unlisted frequency, * cpufreq_online() will change to the OPP which has the lowest * frequency, at or above the unlisted frequency. * Since cpufreq_online() always "rounds up" in the case of an * unlisted frequency, this function always "rounds down" in case * of an unlisted frequency. That way, when cpufreq_online() * triggers the first ever call to cpr_set_performance_state(), * it will correctly determine the direction as UP.
*/ for (iter = drv->corners; iter <= end; iter++) { if (iter->freq > rate) break;
i++; if (iter->freq == rate) {
drv->corner = iter; break;
} if (iter->freq < rate)
drv->corner = iter;
}
if (!drv->corner) {
dev_err(drv->dev, "boot up corner not found\n"); return -EINVAL;
}
/* * This driver only supports scaling voltage for a CPU cluster * where all CPUs in the cluster share a single regulator. * Therefore, save the struct device pointer only for the first * CPU device that gets attached. There is no need to do any * additional initialization when further CPUs get attached.
*/ if (drv->attached_cpu_dev) return 0;
/* * cpr_scale_voltage() requires the direction (if we are changing * to a higher or lower OPP). The first time * cpr_set_performance_state() is called, there is no previous * performance state defined. Therefore, we call * cpr_find_initial_corner() that gets the CPU clock frequency * set by the bootloader, so that we can determine the direction * the first time cpr_set_performance_state() is called.
*/
drv->cpu_clk = devm_clk_get(dev, NULL); if (IS_ERR(drv->cpu_clk)) return dev_err_probe(drv->dev, PTR_ERR(drv->cpu_clk), "could not get cpu clk\n");
drv->attached_cpu_dev = dev;
dev_dbg(drv->dev, "using cpu clk from: %s\n",
dev_name(drv->attached_cpu_dev));
/* * Everything related to (virtual) corners has to be initialized * here, when attaching to the power domain, since we need to know * the maximum frequency for each fuse corner, and this is only * available after the cpufreq driver has attached to us. * The reason for this is that we need to know the highest * frequency associated with each fuse corner.
*/
ret = dev_pm_opp_get_opp_count(&drv->pd.dev); if (ret < 0) {
dev_err(drv->dev, "could not get OPP count\n"); return ret;
}
drv->num_corners = ret;
if (drv->num_corners < 2) {
dev_err(drv->dev, "need at least 2 OPPs to use CPR\n"); return -EINVAL;
}
drv->corners = devm_kcalloc(drv->dev, drv->num_corners, sizeof(*drv->corners),
GFP_KERNEL); if (!drv->corners) return -ENOMEM;
ret = cpr_corner_init(drv); if (ret) return ret;
cpr_set_loop_allowed(drv);
ret = cpr_init_parameters(drv); if (ret) return ret;
/* Configure CPR HW but keep it disabled */
ret = cpr_config(drv); if (ret) return ret;
ret = cpr_find_initial_corner(drv); if (ret) return ret;
if (acc_desc->config)
regmap_multi_reg_write(drv->tcsr, acc_desc->config,
acc_desc->num_regs_per_fuse);
/* Enable ACC if required */ if (acc_desc->enable_mask)
regmap_update_bits(drv->tcsr, acc_desc->enable_reg,
acc_desc->enable_mask,
acc_desc->enable_mask);
dev_info(drv->dev, "driver initialized with %u OPPs\n",
drv->num_corners);
drv->fuse_corners = devm_kcalloc(dev, drv->desc->num_fuse_corners, sizeof(*drv->fuse_corners),
GFP_KERNEL); if (!drv->fuse_corners) return -ENOMEM;
np = of_parse_phandle(dev->of_node, "acc-syscon", 0); if (!np) return -ENODEV;
drv->tcsr = syscon_node_to_regmap(np);
of_node_put(np); if (IS_ERR(drv->tcsr)) return PTR_ERR(drv->tcsr);
drv->base = devm_platform_ioremap_resource(pdev, 0); if (IS_ERR(drv->base)) return PTR_ERR(drv->base);
irq = platform_get_irq(pdev, 0); if (irq < 0) return -EINVAL;
drv->vdd_apc = devm_regulator_get(dev, "vdd-apc"); if (IS_ERR(drv->vdd_apc)) return PTR_ERR(drv->vdd_apc);
/* * Initialize fuse corners, since it simply depends * on data in efuses. * Everything related to (virtual) corners has to be * initialized after attaching to the power domain, * since it depends on the CPU's OPP table.
*/
ret = nvmem_cell_read_variable_le_u32(dev, "cpr_fuse_revision", &cpr_rev); if (ret) return ret;
drv->cpr_fuses = cpr_get_fuses(drv); if (IS_ERR(drv->cpr_fuses)) return PTR_ERR(drv->cpr_fuses);
ret = cpr_populate_ring_osc_idx(drv); if (ret) return ret;
ret = cpr_fuse_corner_init(drv); if (ret) return ret;
mutex_init(&drv->lock);
ret = devm_request_threaded_irq(dev, irq, NULL,
cpr_irq_handler,
IRQF_ONESHOT | IRQF_TRIGGER_RISING, "cpr", drv); if (ret) return ret;
drv->pd.name = devm_kstrdup_const(dev, dev->of_node->full_name,
GFP_KERNEL); if (!drv->pd.name) return -EINVAL;
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit noch Richtigkeit
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.