2 * AD7792/AD7793 SPI ADC driver
4 * Copyright 2011 Analog Devices Inc.
6 * Licensed under the GPL-2.
9 #include <linux/interrupt.h>
10 #include <linux/device.h>
11 #include <linux/kernel.h>
12 #include <linux/slab.h>
13 #include <linux/sysfs.h>
14 #include <linux/spi/spi.h>
15 #include <linux/regulator/consumer.h>
16 #include <linux/err.h>
17 #include <linux/sched.h>
18 #include <linux/delay.h>
22 #include "../ring_generic.h"
23 #include "../ring_sw.h"
24 #include "../trigger.h"
25 #include "../trigger_consumer.h"
30 * The AD7792/AD7793 features a dual use data out ready DOUT/RDY output.
31 * In order to avoid contentions on the SPI bus, it's therefore necessary
32 * to use spi bus locking.
34 * The DOUT/RDY output must also be wired to an interrupt capable GPIO.
/*
 * Per-variant data: the IIO channel specifications for each supported
 * device (AD7792 vs AD7793 differ in realbits — see ad7793_chip_info_tbl).
 * NOTE(review): struct body partially elided from this view.
 */
37 struct ad7793_chip_info {
38 struct iio_chan_spec channel[7];
/*
 * Driver instance state (enclosing struct declaration elided from this
 * view — presumably struct ad7793_state).
 */
42 struct spi_device *spi;
43 struct iio_trigger *trig;
44 const struct ad7793_chip_info *chip_info;
45 struct regulator *reg;
46 struct ad7793_platform_data *pdata;
/* Single-conversion completion: IRQ handler wakes waiters (see st->done). */
47 wait_queue_head_t wq_data_avail;
/* Table of selectable input scales, {integer, nano} pairs per gain setting. */
53 u32 scale_avail[8][2];
54 u32 available_scan_masks[7];
56 * DMA (thus cache coherency maintenance) requires the
57 * transfer buffers to live in their own cache lines.
59 u8 data[4] ____cacheline_aligned;
/* Indices into ad7793_chip_info_tbl[] (enumerators elided from this view). */
62 enum ad7793_supported_device_ids {
/*
 * Write @val (of @size bytes) to on-chip register @reg.
 * @locked: use spi_sync_locked() because the caller already holds the bus
 *          lock (needed since DOUT/RDY shares the SPI lines).
 * @cs_change: keep chip select asserted after the transfer so the RDY
 *          interrupt can be observed.
 * NOTE(review): parts of the body (buffer setup, message init) are elided
 * from this view.
 */
67 static int __ad7793_write_reg(struct ad7793_state *st, bool locked,
68 bool cs_change, unsigned char reg,
69 unsigned size, unsigned val)
72 struct spi_transfer t = {
75 .cs_change = cs_change,
/* First byte is the communications register: write + target address. */
79 data[0] = AD7793_COMM_WRITE | AD7793_COMM_ADDR(reg);
99 spi_message_add_tail(&t, &m);
102 return spi_sync_locked(st->spi, &m);
104 return spi_sync(st->spi, &m);
/* Convenience wrapper: unlocked register write with CS released afterwards. */
107 static int ad7793_write_reg(struct ad7793_state *st,
108 unsigned reg, unsigned size, unsigned val)
110 return __ad7793_write_reg(st, false, false, reg, size, val);
/*
 * Read @size bytes from register @reg into *@val.
 * Two transfers: command byte out, then data in. @locked/@cs_change have
 * the same meaning as in __ad7793_write_reg().
 * NOTE(review): transfer-array initializers and error handling are partly
 * elided from this view.
 */
113 static int __ad7793_read_reg(struct ad7793_state *st, bool locked,
114 bool cs_change, unsigned char reg,
115 int *val, unsigned size)
119 struct spi_transfer t[] = {
126 .cs_change = cs_change,
129 struct spi_message m;
/* Communications register: read + target address. */
131 data[0] = AD7793_COMM_READ | AD7793_COMM_ADDR(reg);
133 spi_message_init(&m);
134 spi_message_add_tail(&t[0], &m);
135 spi_message_add_tail(&t[1], &m);
138 ret = spi_sync_locked(st->spi, &m);
140 ret = spi_sync(st->spi, &m);
/* Assemble big-endian register contents (24-bit and 16-bit cases shown). */
147 *val = data[0] << 16 | data[1] << 8 | data[2];
150 *val = data[0] << 8 | data[1];
/* Convenience wrapper: unlocked register read with CS released afterwards. */
162 static int ad7793_read_reg(struct ad7793_state *st,
163 unsigned reg, int *val, unsigned size)
165 return __ad7793_read_reg(st, 0, 0, reg, val, size);
/*
 * Perform one single conversion on channel @ch and read @len bytes of
 * result into *@val.  Selects the channel, starts a single conversion
 * under the SPI bus lock, then sleeps until the RDY interrupt fires.
 * NOTE(review): error-path handling between the visible statements is
 * elided from this view.
 */
168 static int ad7793_read(struct ad7793_state *st, unsigned ch,
169 unsigned len, int *val)
/* Route the requested channel and arm single-conversion mode. */
172 st->conf = (st->conf & ~AD7793_CONF_CHAN(-1)) | AD7793_CONF_CHAN(ch);
173 st->mode = (st->mode & ~AD7793_MODE_SEL(-1)) |
174 AD7793_MODE_SEL(AD7793_MODE_SINGLE);
176 ad7793_write_reg(st, AD7793_REG_CONF, sizeof(st->conf), st->conf);
/* Lock the bus: DOUT doubles as RDY, so no other traffic may interleave. */
178 spi_bus_lock(st->spi->master);
181 ret = __ad7793_write_reg(st, 1, 1, AD7793_REG_MODE,
182 sizeof(st->mode), st->mode);
/* Wait for the data-ready IRQ (handler sets st->done, presumably). */
187 enable_irq(st->spi->irq);
188 wait_event_interruptible(st->wq_data_avail, st->done);
190 ret = __ad7793_read_reg(st, 1, 0, AD7793_REG_DATA, val, len);
192 spi_bus_unlock(st->spi->master);
/*
 * Run one calibration cycle (@mode, e.g. internal zero/full scale) on
 * channel @ch, waiting for completion via the RDY interrupt, then return
 * the part to idle mode.  Same bus-locking scheme as ad7793_read().
 */
197 static int ad7793_calibrate(struct ad7793_state *st, unsigned mode, unsigned ch)
201 st->conf = (st->conf & ~AD7793_CONF_CHAN(-1)) | AD7793_CONF_CHAN(ch);
202 st->mode = (st->mode & ~AD7793_MODE_SEL(-1)) | AD7793_MODE_SEL(mode);
204 ad7793_write_reg(st, AD7793_REG_CONF, sizeof(st->conf), st->conf);
206 spi_bus_lock(st->spi->master);
209 ret = __ad7793_write_reg(st, 1, 1, AD7793_REG_MODE,
210 sizeof(st->mode), st->mode);
/* Calibration done is signalled like a conversion: RDY IRQ wakes us. */
215 enable_irq(st->spi->irq);
216 wait_event_interruptible(st->wq_data_avail, st->done);
/* Back to idle so the device stops converting. */
218 st->mode = (st->mode & ~AD7793_MODE_SEL(-1)) |
219 AD7793_MODE_SEL(AD7793_MODE_IDLE);
221 ret = __ad7793_write_reg(st, 1, 0, AD7793_REG_MODE,
222 sizeof(st->mode), st->mode);
224 spi_bus_unlock(st->spi->master);
/*
 * Calibration sequence: internal zero- then full-scale calibration for
 * each of the three differential input pairs (AIN1..AIN3).
 */
229 static const u8 ad7793_calib_arr[6][2] = {
230 {AD7793_MODE_CAL_INT_ZERO, AD7793_CH_AIN1P_AIN1M},
231 {AD7793_MODE_CAL_INT_FULL, AD7793_CH_AIN1P_AIN1M},
232 {AD7793_MODE_CAL_INT_ZERO, AD7793_CH_AIN2P_AIN2M},
233 {AD7793_MODE_CAL_INT_FULL, AD7793_CH_AIN2P_AIN2M},
234 {AD7793_MODE_CAL_INT_ZERO, AD7793_CH_AIN3P_AIN3M},
235 {AD7793_MODE_CAL_INT_FULL, AD7793_CH_AIN3P_AIN3M}
/* Run the full ad7793_calib_arr[] calibration sequence; log on failure. */
238 static int ad7793_calibrate_all(struct ad7793_state *st)
242 for (i = 0; i < ARRAY_SIZE(ad7793_calib_arr); i++) {
243 ret = ad7793_calibrate(st, ad7793_calib_arr[i][0],
244 ad7793_calib_arr[i][1]);
251 dev_err(&st->spi->dev, "Calibration failed\n");
/*
 * One-time device bring-up: reset the serial interface, verify the chip
 * ID, program mode/config/IO registers from platform data, calibrate all
 * channels, then precompute the available input scales.
 * NOTE(review): several error checks between the visible statements are
 * elided from this view.
 */
255 static int ad7793_setup(struct ad7793_state *st)
258 unsigned long long scale_uv;
261 /* reset the serial interface */
/* 32 clocks of 1s resets the part; 'ret' is just reused as a 0xff.. buffer
 * here — relies on its prior value. NOTE(review): confirm ret is set to -1
 * on an elided line above. */
262 ret = spi_write(st->spi, (u8 *)&ret, sizeof(ret));
/* NOTE(review): msleep(1) can sleep far longer than 500us; usleep_range()
 * would be the more precise choice — harmless here, bring-up path only. */
265 msleep(1); /* Wait for at least 500us */
267 /* write/read test for device presence */
268 ret = ad7793_read_reg(st, AD7793_REG_ID, &id, 1);
272 id &= AD7793_ID_MASK;
274 if (!((id == AD7792_ID) || (id == AD7793_ID))) {
275 dev_err(&st->spi->dev, "device ID query failed\n");
/* Seed mode/conf shadows from platform data, forcing idle and channel 0. */
279 st->mode = (st->pdata->mode & ~AD7793_MODE_SEL(-1)) |
280 AD7793_MODE_SEL(AD7793_MODE_IDLE);
281 st->conf = st->pdata->conf & ~AD7793_CONF_CHAN(-1);
283 ret = ad7793_write_reg(st, AD7793_REG_MODE, sizeof(st->mode), st->mode);
287 ret = ad7793_write_reg(st, AD7793_REG_CONF, sizeof(st->conf), st->conf);
291 ret = ad7793_write_reg(st, AD7793_REG_IO,
292 sizeof(st->pdata->io), st->pdata->io);
296 ret = ad7793_calibrate_all(st);
300 /* Populate available ADC input ranges */
/* Scale in nV per LSB; unipolar mode uses the full code range, bipolar
 * effectively halves it (hence the -1 on realbits). */
301 for (i = 0; i < ARRAY_SIZE(st->scale_avail); i++) {
302 scale_uv = ((u64)st->int_vref_mv * 100000000)
303 >> (st->chip_info->channel[0].scan_type.realbits -
304 (!!(st->conf & AD7793_CONF_UNIPOLAR) ? 0 : 1));
307 st->scale_avail[i][1] = do_div(scale_uv, 100000000) * 10;
308 st->scale_avail[i][0] = scale_uv;
313 dev_err(&st->spi->dev, "setup failed\n");
/*
 * Fetch the most recent sample for channel @ch from the ring buffer
 * (used while buffered capture is running, so a direct single conversion
 * is not possible).  Fails if @ch is not in the active scan mask.
 */
317 static int ad7793_scan_from_ring(struct ad7793_state *st, unsigned ch, int *val)
319 struct iio_ring_buffer *ring = iio_priv_to_dev(st)->ring;
322 u32 *dat32 = (u32 *)dat64;
324 if (!(ring->scan_mask & (1 << ch)))
327 ret = ring->access->read_last(ring, (u8 *) &dat64);
/*
 * Ring-buffer preenable: compute the datum size (sample plus optional
 * 8-byte-aligned timestamp), select the single active channel, switch the
 * device to continuous conversion, and take the SPI bus lock for the
 * duration of buffered capture (released in postdisable).
 */
336 static int ad7793_ring_preenable(struct iio_dev *indio_dev)
338 struct ad7793_state *st = iio_priv(indio_dev);
339 struct iio_ring_buffer *ring = indio_dev->ring;
343 if (!ring->scan_count)
/* Only one channel can be converted at a time: take the lowest set bit. */
346 channel = __ffs(ring->scan_mask);
348 d_size = ring->scan_count *
349 indio_dev->channels[0].scan_type.storagebits / 8;
351 if (ring->scan_timestamp) {
352 d_size += sizeof(s64);
/* Pad so the s64 timestamp lands on an 8-byte boundary. */
354 if (d_size % sizeof(s64))
355 d_size += sizeof(s64) - (d_size % sizeof(s64));
358 if (indio_dev->ring->access->set_bytes_per_datum)
359 indio_dev->ring->access->set_bytes_per_datum(indio_dev->ring,
362 st->mode = (st->mode & ~AD7793_MODE_SEL(-1)) |
363 AD7793_MODE_SEL(AD7793_MODE_CONT);
364 st->conf = (st->conf & ~AD7793_CONF_CHAN(-1)) |
365 AD7793_CONF_CHAN(indio_dev->channels[channel].address);
367 ad7793_write_reg(st, AD7793_REG_CONF, sizeof(st->conf), st->conf);
/* Bus stays locked until ad7793_ring_postdisable(). */
369 spi_bus_lock(st->spi->master);
370 __ad7793_write_reg(st, 1, 1, AD7793_REG_MODE,
371 sizeof(st->mode), st->mode);
374 enable_irq(st->spi->irq);
/*
 * Ring-buffer postdisable: wait for the in-flight conversion, quiesce the
 * IRQ, put the device back into idle mode and release the SPI bus lock
 * taken in preenable.
 */
379 static int ad7793_ring_postdisable(struct iio_dev *indio_dev)
381 struct ad7793_state *st = iio_priv(indio_dev);
383 st->mode = (st->mode & ~AD7793_MODE_SEL(-1)) |
384 AD7793_MODE_SEL(AD7793_MODE_IDLE);
/* Let the last conversion complete before reprogramming the mode. */
387 wait_event_interruptible(st->wq_data_avail, st->done);
390 disable_irq_nosync(st->spi->irq);
392 __ad7793_write_reg(st, 1, 0, AD7793_REG_MODE,
393 sizeof(st->mode), st->mode);
395 return spi_bus_unlock(st->spi->master);
399 * ad7793_trigger_handler() bh of trigger launched polling to ring buffer
402 static irqreturn_t ad7793_trigger_handler(int irq, void *p)
404 struct iio_poll_func *pf = p;
405 struct iio_dev *indio_dev = pf->indio_dev;
406 struct iio_ring_buffer *ring = indio_dev->ring;
407 struct ad7793_state *st = iio_priv(indio_dev);
409 s32 *dat32 = (s32 *)dat64;
/* Read the conversion result (bus lock is held throughout capture). */
411 if (ring->scan_count)
412 __ad7793_read_reg(st, 1, 1, AD7793_REG_DATA,
414 indio_dev->channels[0].scan_type.realbits/8);
416 /* Guaranteed to be aligned with 8 byte boundary */
417 if (ring->scan_timestamp)
418 dat64[1] = pf->timestamp;
420 ring->access->store_to(ring, (u8 *)dat64, pf->timestamp);
422 iio_trigger_notify_done(indio_dev->trig);
/* Re-arm the RDY interrupt for the next conversion. */
424 enable_irq(st->spi->irq);
/* Ring setup ops: device-specific pre/post hooks around the generic
 * triggered-ring enable/disable helpers. */
429 static const struct iio_ring_setup_ops ad7793_ring_setup_ops = {
430 .preenable = &ad7793_ring_preenable,
431 .postenable = &iio_triggered_ring_postenable,
432 .predisable = &iio_triggered_ring_predisable,
433 .postdisable = &ad7793_ring_postdisable,
/*
 * Allocate the software ring buffer and poll function, wire up the setup
 * ops and advertise triggered-ring mode.  On pollfunc allocation failure
 * the ring is freed again.
 */
436 static int ad7793_register_ring_funcs_and_init(struct iio_dev *indio_dev)
440 indio_dev->ring = iio_sw_rb_allocate(indio_dev);
441 if (!indio_dev->ring) {
445 /* Effectively select the ring buffer implementation */
446 indio_dev->ring->access = &ring_sw_access_funcs;
447 indio_dev->pollfunc = iio_alloc_pollfunc(&iio_pollfunc_store_time,
448 &ad7793_trigger_handler,
453 if (indio_dev->pollfunc == NULL) {
455 goto error_deallocate_sw_rb;
458 /* Ring buffer functions - here trigger setup related */
459 indio_dev->ring->setup_ops = &ad7793_ring_setup_ops;
461 /* Flag that polled ring buffering is possible */
462 indio_dev->modes |= INDIO_RING_TRIGGERED;
465 error_deallocate_sw_rb:
466 iio_sw_rb_free(indio_dev->ring);
/* Undo ad7793_register_ring_funcs_and_init(): free pollfunc and ring. */
471 static void ad7793_ring_cleanup(struct iio_dev *indio_dev)
473 iio_dealloc_pollfunc(indio_dev->pollfunc);
474 iio_sw_rb_free(indio_dev->ring);
478 * ad7793_data_rdy_trig_poll() the event handler for the data rdy trig
480 static irqreturn_t ad7793_data_rdy_trig_poll(int irq, void *private)
482 struct ad7793_state *st = iio_priv(private);
/* Wake single-conversion waiters, mask the level IRQ until re-armed,
 * and kick the trigger so the ring-buffer bottom half runs. */
485 wake_up_interruptible(&st->wq_data_avail);
486 disable_irq_nosync(irq);
488 iio_trigger_poll(st->trig, iio_get_time_ns());
/*
 * Allocate and register the data-ready trigger, request the DOUT/RDY GPIO
 * interrupt (kept disabled until a conversion is armed), and install it as
 * the device's default trigger.
 */
493 static int ad7793_probe_trigger(struct iio_dev *indio_dev)
495 struct ad7793_state *st = iio_priv(indio_dev);
498 st->trig = iio_allocate_trigger("%s-dev%d",
499 spi_get_device_id(st->spi)->name,
501 if (st->trig == NULL) {
506 ret = request_irq(st->spi->irq,
507 ad7793_data_rdy_trig_poll,
509 spi_get_device_id(st->spi)->name,
512 goto error_free_trig;
/* IRQ stays off until ad7793_read()/preenable explicitly enables it. */
514 disable_irq_nosync(st->spi->irq);
516 st->trig->dev.parent = &st->spi->dev;
517 st->trig->owner = THIS_MODULE;
518 st->trig->private_data = indio_dev;
520 ret = iio_trigger_register(st->trig);
522 /* select default trigger */
523 indio_dev->trig = st->trig;
530 free_irq(st->spi->irq, indio_dev);
532 iio_free_trigger(st->trig);
/* Tear down what ad7793_probe_trigger() set up, in reverse order. */
537 static void ad7793_remove_trigger(struct iio_dev *indio_dev)
539 struct ad7793_state *st = iio_priv(indio_dev);
541 iio_trigger_unregister(st->trig);
542 free_irq(st->spi->irq, indio_dev);
543 iio_free_trigger(st->trig);
/* Output data rates in Hz, indexed by the MODE register rate field
 * (index 0 is reserved/invalid on the part, hence the leading 0). */
546 static const u16 sample_freq_avail[16] = {0, 470, 242, 123, 62, 50, 39, 33, 19,
547 17, 16, 12, 10, 8, 6, 4};
/* sysfs: show the current sampling frequency (from the cached mode word). */
549 static ssize_t ad7793_read_frequency(struct device *dev,
550 struct device_attribute *attr,
553 struct iio_dev *indio_dev = dev_get_drvdata(dev);
554 struct ad7793_state *st = iio_priv(indio_dev);
556 return sprintf(buf, "%d\n",
557 sample_freq_avail[AD7793_MODE_RATE(st->mode)]);
/*
 * sysfs: set the sampling frequency.  Rejected while buffered capture is
 * running; otherwise the value must match one of sample_freq_avail[].
 * NOTE(review): strict_strtol() is the deprecated precursor of kstrtol().
 */
560 static ssize_t ad7793_write_frequency(struct device *dev,
561 struct device_attribute *attr,
565 struct iio_dev *indio_dev = dev_get_drvdata(dev);
566 struct ad7793_state *st = iio_priv(indio_dev);
/* No rate changes while the ring is capturing. */
570 mutex_lock(&indio_dev->mlock);
571 if (iio_ring_enabled(indio_dev)) {
572 mutex_unlock(&indio_dev->mlock);
575 mutex_unlock(&indio_dev->mlock);
577 ret = strict_strtol(buf, 10, &lval);
583 for (i = 0; i < ARRAY_SIZE(sample_freq_avail); i++)
584 if (lval == sample_freq_avail[i]) {
585 mutex_lock(&indio_dev->mlock);
586 st->mode &= ~AD7793_MODE_RATE(-1);
587 st->mode |= AD7793_MODE_RATE(i);
588 ad7793_write_reg(st, AD7793_REG_MODE,
589 sizeof(st->mode), st->mode);
590 mutex_unlock(&indio_dev->mlock);
594 return ret ? ret : len;
/* sysfs sampling_frequency (rw) and its fixed list of legal values. */
597 static IIO_DEV_ATTR_SAMP_FREQ(S_IWUSR | S_IRUGO,
598 ad7793_read_frequency,
599 ad7793_write_frequency);
601 static IIO_CONST_ATTR_SAMP_FREQ_AVAIL(
602 "470 242 123 62 50 39 33 19 17 16 12 10 8 6 4");
/* sysfs: list the selectable input scales as "int.nano" pairs. */
604 static ssize_t ad7793_show_scale_available(struct device *dev,
605 struct device_attribute *attr, char *buf)
607 struct iio_dev *indio_dev = dev_get_drvdata(dev);
608 struct ad7793_state *st = iio_priv(indio_dev);
611 for (i = 0; i < ARRAY_SIZE(st->scale_avail); i++)
612 len += sprintf(buf + len, "%d.%09u ", st->scale_avail[i][0],
613 st->scale_avail[i][1]);
615 len += sprintf(buf + len, "\n");
620 static IIO_DEVICE_ATTR_NAMED(in_m_in_scale_available, in-in_scale_available,
621 S_IRUGO, ad7793_show_scale_available, NULL, 0);
/* Device-level sysfs attributes exposed via iio_info.attrs. */
623 static struct attribute *ad7793_attributes[] = {
624 &iio_dev_attr_sampling_frequency.dev_attr.attr,
625 &iio_const_attr_sampling_frequency_available.dev_attr.attr,
626 &iio_dev_attr_in_m_in_scale_available.dev_attr.attr,
630 static const struct attribute_group ad7793_attribute_group = {
631 .attrs = ad7793_attributes,
/*
 * iio_info.read_raw: return a raw sample (from the ring while buffering,
 * otherwise via a fresh single conversion) or the applicable scale.
 * NOTE(review): the switch framing around the visible cases is elided
 * from this view.
 */
634 static int ad7793_read_raw(struct iio_dev *indio_dev,
635 struct iio_chan_spec const *chan,
640 struct ad7793_state *st = iio_priv(indio_dev);
642 unsigned long long scale_uv;
643 bool unipolar = !!(st->conf & AD7793_CONF_UNIPOLAR);
647 mutex_lock(&indio_dev->mlock);
648 if (iio_ring_enabled(indio_dev))
649 ret = ad7793_scan_from_ring(st,
650 chan->scan_index, &smpl);
652 ret = ad7793_read(st, chan->address,
653 chan->scan_type.realbits / 8, &smpl);
654 mutex_unlock(&indio_dev->mlock);
/* Mask to realbits; in bipolar mode recentre the offset-binary code. */
659 *val = (smpl >> chan->scan_type.shift) &
660 ((1 << (chan->scan_type.realbits)) - 1);
663 *val -= (1 << (chan->scan_type.realbits - 1));
/* Shared scale: gain-dependent table entry selected by CONF bits 8..10. */
667 case (1 << IIO_CHAN_INFO_SCALE_SHARED):
668 *val = st->scale_avail[(st->conf >> 8) & 0x7][0];
669 *val2 = st->scale_avail[(st->conf >> 8) & 0x7][1];
671 return IIO_VAL_INT_PLUS_NANO;
673 case (1 << IIO_CHAN_INFO_SCALE_SEPARATE):
674 switch (chan->type) {
676 /* 1170mV / 2^23 * 6 */
677 scale_uv = (1170ULL * 100000000ULL * 6ULL)
678 >> (chan->scan_type.realbits -
682 /* Always uses unity gain and internal ref */
683 scale_uv = (2500ULL * 100000000ULL)
684 >> (chan->scan_type.realbits -
691 *val2 = do_div(scale_uv, 100000000) * 10;
694 return IIO_VAL_INT_PLUS_NANO;
/*
 * iio_info.write_raw: select a new input scale by matching the requested
 * nano part against scale_avail[]; a gain change triggers recalibration.
 * Rejected while buffered capture is running.
 */
699 static int ad7793_write_raw(struct iio_dev *indio_dev,
700 struct iio_chan_spec const *chan,
705 struct ad7793_state *st = iio_priv(indio_dev);
709 mutex_lock(&indio_dev->mlock);
710 if (iio_ring_enabled(indio_dev)) {
711 mutex_unlock(&indio_dev->mlock);
716 case (1 << IIO_CHAN_INFO_SCALE_SHARED):
718 for (i = 0; i < ARRAY_SIZE(st->scale_avail); i++)
719 if (val2 == st->scale_avail[i][1]) {
721 st->conf &= ~AD7793_CONF_GAIN(-1);
722 st->conf |= AD7793_CONF_GAIN(i);
/* Only touch hardware (and recalibrate) if the gain really changed. */
724 if (tmp != st->conf) {
725 ad7793_write_reg(st, AD7793_REG_CONF,
728 ad7793_calibrate_all(st);
737 mutex_unlock(&indio_dev->mlock);
/* Only the device's own data-ready trigger may drive it. */
741 static int ad7793_validate_trigger(struct iio_dev *indio_dev,
742 struct iio_trigger *trig)
744 if (indio_dev->trig != trig)
/* Scale writes are interpreted as integer + nano fractional part. */
750 static int ad7793_write_raw_get_fmt(struct iio_dev *indio_dev,
751 struct iio_chan_spec const *chan,
754 return IIO_VAL_INT_PLUS_NANO;
/* IIO core callbacks for this driver. */
757 static const struct iio_info ad7793_info = {
758 .read_raw = &ad7793_read_raw,
759 .write_raw = &ad7793_write_raw,
760 .write_raw_get_fmt = &ad7793_write_raw_get_fmt,
761 .attrs = &ad7793_attribute_group,
762 .validate_trigger = ad7793_validate_trigger,
763 .driver_module = THIS_MODULE,
/*
 * Channel maps for the two variants.  First entry appears to be the
 * 24-bit AD7793, second the 16-bit AD7792 (differ only in realbits);
 * each has three differential inputs, a shorted-input channel, the
 * internal temperature sensor, an AVDD monitor, and a soft timestamp.
 */
766 static const struct ad7793_chip_info ad7793_chip_info_tbl[] = {
768 .channel[0] = IIO_CHAN(IIO_IN_DIFF, 0, 1, 0, NULL, 0, 0,
769 (1 << IIO_CHAN_INFO_SCALE_SHARED),
770 AD7793_CH_AIN1P_AIN1M,
771 0, IIO_ST('s', 24, 32, 0), 0),
772 .channel[1] = IIO_CHAN(IIO_IN_DIFF, 0, 1, 0, NULL, 1, 1,
773 (1 << IIO_CHAN_INFO_SCALE_SHARED),
774 AD7793_CH_AIN2P_AIN2M,
775 1, IIO_ST('s', 24, 32, 0), 0),
776 .channel[2] = IIO_CHAN(IIO_IN_DIFF, 0, 1, 0, NULL, 2, 2,
777 (1 << IIO_CHAN_INFO_SCALE_SHARED),
778 AD7793_CH_AIN3P_AIN3M,
779 2, IIO_ST('s', 24, 32, 0), 0),
780 .channel[3] = IIO_CHAN(IIO_IN_DIFF, 0, 1, 0, "shorted", 0, 0,
781 (1 << IIO_CHAN_INFO_SCALE_SHARED),
782 AD7793_CH_AIN1M_AIN1M,
783 3, IIO_ST('s', 24, 32, 0), 0),
784 .channel[4] = IIO_CHAN(IIO_TEMP, 0, 1, 0, NULL, 0, 0,
785 (1 << IIO_CHAN_INFO_SCALE_SEPARATE),
787 4, IIO_ST('s', 24, 32, 0), 0),
788 .channel[5] = IIO_CHAN(IIO_IN, 0, 1, 0, "supply", 4, 0,
789 (1 << IIO_CHAN_INFO_SCALE_SEPARATE),
790 AD7793_CH_AVDD_MONITOR,
791 5, IIO_ST('s', 24, 32, 0), 0),
792 .channel[6] = IIO_CHAN_SOFT_TIMESTAMP(6),
795 .channel[0] = IIO_CHAN(IIO_IN_DIFF, 0, 1, 0, NULL, 0, 0,
796 (1 << IIO_CHAN_INFO_SCALE_SHARED),
797 AD7793_CH_AIN1P_AIN1M,
798 0, IIO_ST('s', 16, 32, 0), 0),
799 .channel[1] = IIO_CHAN(IIO_IN_DIFF, 0, 1, 0, NULL, 1, 1,
800 (1 << IIO_CHAN_INFO_SCALE_SHARED),
801 AD7793_CH_AIN2P_AIN2M,
802 1, IIO_ST('s', 16, 32, 0), 0),
803 .channel[2] = IIO_CHAN(IIO_IN_DIFF, 0, 1, 0, NULL, 2, 2,
804 (1 << IIO_CHAN_INFO_SCALE_SHARED),
805 AD7793_CH_AIN3P_AIN3M,
806 2, IIO_ST('s', 16, 32, 0), 0),
807 .channel[3] = IIO_CHAN(IIO_IN_DIFF, 0, 1, 0, "shorted", 0, 0,
808 (1 << IIO_CHAN_INFO_SCALE_SHARED),
809 AD7793_CH_AIN1M_AIN1M,
810 3, IIO_ST('s', 16, 32, 0), 0),
811 .channel[4] = IIO_CHAN(IIO_TEMP, 0, 1, 0, NULL, 0, 0,
812 (1 << IIO_CHAN_INFO_SCALE_SEPARATE),
814 4, IIO_ST('s', 16, 32, 0), 0),
815 .channel[5] = IIO_CHAN(IIO_IN, 0, 1, 0, "supply", 4, 0,
816 (1 << IIO_CHAN_INFO_SCALE_SEPARATE),
817 AD7793_CH_AVDD_MONITOR,
818 5, IIO_ST('s', 16, 32, 0), 0),
819 .channel[6] = IIO_CHAN_SOFT_TIMESTAMP(6),
/*
 * SPI probe: validate platform data and IRQ, allocate the IIO device,
 * acquire the (optional) vcc regulator, pick the reference voltage
 * (pdata override > regulator > 2.5 V internal), register ring/trigger/
 * buffer, then run device setup.  Error labels unwind in reverse order.
 * NOTE(review): several error-check lines and labels are elided from
 * this view.
 */
823 static int __devinit ad7793_probe(struct spi_device *spi)
825 struct ad7793_platform_data *pdata = spi->dev.platform_data;
826 struct ad7793_state *st;
827 struct iio_dev *indio_dev;
828 int ret, i, voltage_uv = 0, regdone = 0;
831 dev_err(&spi->dev, "no platform data?\n");
/* The RDY signal must be wired to an interrupt - refuse to probe without. */
836 dev_err(&spi->dev, "no IRQ?\n");
840 indio_dev = iio_allocate_device(sizeof(*st));
841 if (indio_dev == NULL)
844 st = iio_priv(indio_dev);
/* Regulator is optional: IS_ERR(st->reg) is checked on every later use. */
846 st->reg = regulator_get(&spi->dev, "vcc");
847 if (!IS_ERR(st->reg)) {
848 ret = regulator_enable(st->reg);
852 voltage_uv = regulator_get_voltage(st->reg);
856 &ad7793_chip_info_tbl[spi_get_device_id(spi)->driver_data];
860 if (pdata && pdata->vref_mv)
861 st->int_vref_mv = pdata->vref_mv;
863 st->int_vref_mv = voltage_uv / 1000;
865 st->int_vref_mv = 2500; /* Build-in ref */
867 spi_set_drvdata(spi, indio_dev);
870 indio_dev->dev.parent = &spi->dev;
871 indio_dev->name = spi_get_device_id(spi)->name;
872 indio_dev->modes = INDIO_DIRECT_MODE;
873 indio_dev->channels = st->chip_info->channel;
874 indio_dev->available_scan_masks = st->available_scan_masks;
875 indio_dev->num_channels = 7;
876 indio_dev->info = &ad7793_info;
/* Each mask pairs one data channel with the timestamp channel. */
878 for (i = 0; i < indio_dev->num_channels; i++)
879 st->available_scan_masks[i] = (1 << i) | (1 <<
880 indio_dev->channels[indio_dev->num_channels - 1].
883 init_waitqueue_head(&st->wq_data_avail);
885 ret = ad7793_register_ring_funcs_and_init(indio_dev);
887 goto error_disable_reg;
889 ret = iio_device_register(indio_dev);
891 goto error_unreg_ring;
894 ret = ad7793_probe_trigger(indio_dev);
896 goto error_unreg_ring;
898 ret = iio_ring_buffer_register(indio_dev,
900 indio_dev->num_channels);
902 goto error_remove_trigger;
904 ret = ad7793_setup(st);
906 goto error_uninitialize_ring;
910 error_uninitialize_ring:
911 iio_ring_buffer_unregister(indio_dev);
912 error_remove_trigger:
913 ad7793_remove_trigger(indio_dev);
915 ad7793_ring_cleanup(indio_dev);
917 if (!IS_ERR(st->reg))
918 regulator_disable(st->reg);
920 if (!IS_ERR(st->reg))
921 regulator_put(st->reg);
924 iio_device_unregister(indio_dev);
926 iio_free_device(indio_dev);
/* SPI remove: unwind probe in reverse — buffer, trigger, ring, regulator,
 * IIO device. */
931 static int ad7793_remove(struct spi_device *spi)
933 struct iio_dev *indio_dev = spi_get_drvdata(spi);
934 struct ad7793_state *st = iio_priv(indio_dev);
936 iio_ring_buffer_unregister(indio_dev);
937 ad7793_remove_trigger(indio_dev);
938 ad7793_ring_cleanup(indio_dev);
940 if (!IS_ERR(st->reg)) {
941 regulator_disable(st->reg);
942 regulator_put(st->reg);
945 iio_device_unregister(indio_dev);
/* SPI device ID table; driver_data indexes ad7793_chip_info_tbl[]. */
950 static const struct spi_device_id ad7793_id[] = {
951 {"ad7792", ID_AD7792},
952 {"ad7793", ID_AD7793},
/* SPI driver registration glue. */
956 static struct spi_driver ad7793_driver = {
959 .bus = &spi_bus_type,
960 .owner = THIS_MODULE,
962 .probe = ad7793_probe,
963 .remove = __devexit_p(ad7793_remove),
964 .id_table = ad7793_id,
/* Module init: register the SPI driver. */
967 static int __init ad7793_init(void)
969 return spi_register_driver(&ad7793_driver);
971 module_init(ad7793_init);
/* Module exit: unregister the SPI driver. */
973 static void __exit ad7793_exit(void)
975 spi_unregister_driver(&ad7793_driver);
977 module_exit(ad7793_exit);
979 MODULE_AUTHOR("Michael Hennerich <hennerich@blackfin.uclinux.org>");
980 MODULE_DESCRIPTION("Analog Devices AD7792/3 ADC");
981 MODULE_LICENSE("GPL v2");